[DebianGIS-dev] r834 - in packages: . drawmap drawmap/branches drawmap/branches/upstream drawmap/branches/upstream/current

frankie at alioth.debian.org frankie at alioth.debian.org
Fri May 18 12:29:03 UTC 2007


Author: frankie
Date: 2007-05-18 12:29:03 +0000 (Fri, 18 May 2007)
New Revision: 834

Added:
   packages/drawmap/
   packages/drawmap/branches/
   packages/drawmap/branches/upstream/
   packages/drawmap/branches/upstream/current/
   packages/drawmap/branches/upstream/current/COPYING
   packages/drawmap/branches/upstream/current/COPYING_NOTE
   packages/drawmap/branches/upstream/current/Makefile
   packages/drawmap/branches/upstream/current/README
   packages/drawmap/branches/upstream/current/WHATS_NEW
   packages/drawmap/branches/upstream/current/attrib_codes
   packages/drawmap/branches/upstream/current/attributes
   packages/drawmap/branches/upstream/current/big_buf_io.c
   packages/drawmap/branches/upstream/current/big_buf_io_z.c
   packages/drawmap/branches/upstream/current/colors.h
   packages/drawmap/branches/upstream/current/dem.c
   packages/drawmap/branches/upstream/current/dem.h
   packages/drawmap/branches/upstream/current/dem_sdts.c
   packages/drawmap/branches/upstream/current/dlg.c
   packages/drawmap/branches/upstream/current/dlg.h
   packages/drawmap/branches/upstream/current/dlg_sdts.c
   packages/drawmap/branches/upstream/current/drawmap.1n
   packages/drawmap/branches/upstream/current/drawmap.c
   packages/drawmap/branches/upstream/current/drawmap.h
   packages/drawmap/branches/upstream/current/font_5x8.h
   packages/drawmap/branches/upstream/current/font_6x10.h
   packages/drawmap/branches/upstream/current/gtopo30.c
   packages/drawmap/branches/upstream/current/gunzip.c
   packages/drawmap/branches/upstream/current/gzip.h
   packages/drawmap/branches/upstream/current/ll2utm.1n
   packages/drawmap/branches/upstream/current/ll2utm.c
   packages/drawmap/branches/upstream/current/llsearch.1n
   packages/drawmap/branches/upstream/current/llsearch.c
   packages/drawmap/branches/upstream/current/raster.h
   packages/drawmap/branches/upstream/current/sdts2dem.1n
   packages/drawmap/branches/upstream/current/sdts2dem.c
   packages/drawmap/branches/upstream/current/sdts2dlg.1n
   packages/drawmap/branches/upstream/current/sdts2dlg.c
   packages/drawmap/branches/upstream/current/sdts_utils.c
   packages/drawmap/branches/upstream/current/sdts_utils.h
   packages/drawmap/branches/upstream/current/unblock_dem.1n
   packages/drawmap/branches/upstream/current/unblock_dem.c
   packages/drawmap/branches/upstream/current/unblock_dlg.1n
   packages/drawmap/branches/upstream/current/unblock_dlg.c
   packages/drawmap/branches/upstream/current/utilities.c
   packages/drawmap/branches/upstream/current/utm2ll.1n
   packages/drawmap/branches/upstream/current/utm2ll.c
   packages/drawmap/tags/
Log:
[svn-inject] Installing original source of drawmap

Added: packages/drawmap/branches/upstream/current/COPYING
===================================================================
--- packages/drawmap/branches/upstream/current/COPYING	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/COPYING	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,339 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+                          675 Mass Ave, Cambridge, MA 02139, USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	Appendix: How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) 19yy  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) 19yy name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.

Added: packages/drawmap/branches/upstream/current/COPYING_NOTE
===================================================================
--- packages/drawmap/branches/upstream/current/COPYING_NOTE	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/COPYING_NOTE	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,26 @@
+
+October 11, 1998
+
+I have had a request to change the licensing on "drawmap" so that it is subject
+to the GNU Library General Public License rather than the plain old GNU General
+Public License.  This would allow it to be incorporated into packages that, for
+whatever reason, can't be distributed in source-code form.
+
+However, after examining the situation in depth, I have decided not to do this.
+My primary reason is that I have incorporated large portions of the "gzip"
+package into drawmap, in order to allow compressed files to be read efficiently.
+The "gzip" package is licensed under the GNU General Public License, and it
+isn't within my legal power to change the licensing of that code.  The only
+obvious way to change the licensing on "drawmap" would be to split it into two
+separately-distributed chunks:  one containing the modified "gzip" code, and the
+other containing the remainder of "drawmap".  While this would be possible, it
+doesn't seem worthwhile, since the only portion of "drawmap" that has much
+utility as a stand-alone library is the "gzip" portion itself.
+
+If you have an interest in using other portions of "drawmap" under the terms of
+the GNU Library General Public License, then I suggest you contact me at the
+contact address in the README file.  I don't object to such use, as long as you
+understand that the "gzip" code is not mine to control.
+
+Fred M. Erickson
+

Added: packages/drawmap/branches/upstream/current/Makefile
===================================================================
--- packages/drawmap/branches/upstream/current/Makefile	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/Makefile	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,93 @@
+# =========================================================================
+# Makefile - Information for building drawmap, and associated programs.
+# Copyright (c) 1997,1998,1999,2000  Fred M. Erickson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+# =========================================================================
+
+
+
+# If you want a copyright notice inserted into the image, then
+# comment out the first version of NAME, and uncomment the
+# second, and put your name inside the quotes.
+NAME=\"\"
+#NAME=\"Fred M. Erickson\"
+
+CFLAGS = -O
+
+
+
+all: drawmap ll2utm utm2ll unblock_dlg unblock_dem llsearch sdts2dem sdts2dlg man
+
+drawmap: drawmap.c dem.c dem_sdts.c dlg.c dlg_sdts.c sdts_utils.c big_buf_io.c big_buf_io_z.c gunzip.c \
+	 utilities.c gtopo30.c gzip.h font_5x8.h font_6x10.h raster.h drawmap.h colors.h dlg.h dem.h sdts_utils.h
+	$(CC) -DCOPYRIGHT_NAME="${NAME}" $(CFLAGS) -o drawmap drawmap.c dem.c dem_sdts.c dlg.c dlg_sdts.c \
+		sdts_utils.c gtopo30.c big_buf_io.c big_buf_io_z.c gunzip.c utilities.c -lm
+
+ll2utm: ll2utm.c utilities.c
+	$(CC) $(CFLAGS) -o ll2utm ll2utm.c utilities.c -lm
+
+utm2ll: utm2ll.c utilities.c
+	$(CC) $(CFLAGS) -o utm2ll utm2ll.c utilities.c -lm
+
+unblock_dlg: unblock_dlg.c
+	$(CC) $(CFLAGS) -o unblock_dlg unblock_dlg.c
+
+unblock_dem: unblock_dem.c
+	$(CC) $(CFLAGS) -o unblock_dem unblock_dem.c
+
+llsearch: llsearch.c big_buf_io.c utilities.c
+	$(CC) $(CFLAGS) -o llsearch llsearch.c big_buf_io.c utilities.c -lm
+
+sdts2dem: sdts2dem.c sdts_utils.c dem.c dem_sdts.c big_buf_io.c big_buf_io_z.c gunzip.c \
+	 utilities.c gzip.h drawmap.h dem.h sdts_utils.h
+	$(CC) $(CFLAGS) -o sdts2dem sdts2dem.c dem.c dem_sdts.c sdts_utils.c big_buf_io.c big_buf_io_z.c gunzip.c utilities.c -lm
+
+sdts2dlg: sdts2dlg.c dlg.c dlg_sdts.c sdts_utils.c big_buf_io.c big_buf_io_z.c gunzip.c \
+	 utilities.c gzip.h drawmap.h dlg.h sdts_utils.h
+	$(CC) $(CFLAGS) -o sdts2dlg sdts2dlg.c dlg.c dlg_sdts.c sdts_utils.c big_buf_io.c big_buf_io_z.c gunzip.c utilities.c -lm
+
+man: drawmap.1 ll2utm.1 utm2ll.1 llsearch.1 unblock_dlg.1 unblock_dem.1 sdts2dem.1 sdts2dlg.1
+
+drawmap.1: drawmap.1n
+	nroff -man drawmap.1n > drawmap.1
+
+ll2utm.1: ll2utm.1n
+	nroff -man ll2utm.1n > ll2utm.1
+
+utm2ll.1: utm2ll.1n
+	nroff -man utm2ll.1n > utm2ll.1
+
+llsearch.1: llsearch.1n
+	nroff -man llsearch.1n > llsearch.1
+
+unblock_dlg.1: unblock_dlg.1n
+	nroff -man unblock_dlg.1n > unblock_dlg.1
+
+unblock_dem.1: unblock_dem.1n
+	nroff -man unblock_dem.1n > unblock_dem.1
+
+sdts2dem.1: sdts2dem.1n
+	nroff -man sdts2dem.1n > sdts2dem.1
+
+sdts2dlg.1: sdts2dlg.1n
+	nroff -man sdts2dlg.1n > sdts2dlg.1
+
+clean:
+	rm -f drawmap ll2utm utm2ll unblock_dlg unblock_dem llsearch sdts2dem sdts2dlg \
+		drawmap.1 ll2utm.1 utm2ll.1 llsearch.1 unblock_dlg.1 unblock_dem.1 sdts2dem.1 sdts2dlg.1 \
+		drawmap.o dem.o dem_sdts.o dlg.o dlg_sdts.o sdts_utils.o big_buf_io.o \
+		big_buf_io_z.o gunzip.o utilities.o ll2utm.o utm2ll.o unblock_dlg.o unblock_dem.o llsearch.o sdts2dem.o sdts2dlg.o
+

Added: packages/drawmap/branches/upstream/current/README
===================================================================
--- packages/drawmap/branches/upstream/current/README	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/README	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,130 @@
+
+Some notes on drawmap.  Updated on August  2, 2001.
+
+First, how to build it.  Edit the Makefile if you want a copyright
+notice on all of the maps you generate.  Then, simply type:
+
+make
+
+If you aren't on a Linux(TM) system, or similar Unix(TM) system, you will
+probably end up giving up and deleting the whole mess.  Otherwise, you
+should end up with eight executables:  drawmap, llsearch, ll2utm, utm2ll,
+unblock_dem, unblock_dlg, sdts2dem, and sdts2dlg.  There should also be eight
+formatted manual pages, whose file names end with a ".1" extension; and
+eight unformatted manual pages, whose file names end with a ".1n"
+extension.
+
+Install things wherever you want.  On my system, the executables go into
+/usr/local/bin, the manual pages with a ".1" extension get compressed
+(with gzip) and go into /usr/local/man/cat1, and the manual pages with
+a ".1n" extension get compressed (with gzip) and go into /usr/local/man/man1.
+(You don't have to install both the ".1" and ".1n" versions if you don't
+want to.  The ".1n" version is probably the most useful, since it allows
+you to reformat the manual pages into various output formats.)
+
+I have resisted the urge to hard-code any pathnames, library locations,
+and what-have-you into the code.  There is one compile-time option you can
+invoke by editing the Makefile.  If you provide your name, it will be
+compiled into the program, and a copyright notice, with your name in it,
+will be added to the bottom of each map you generate.  I am unsure about
+the copyright status of a map generated by drawmap, since anyone else
+can generate the same map using the same data, but the feature was easy
+to add in, so I did.
+
+
+
+Second, how to use it.  You need some data.  Once you have at least one
+Digital Elevation Model (DEM) file, using drawmap is as simple as typing:
+
+	drawmap -d name_of_your_dem_file
+
+The output will be in a file called "drawmap.sun" and you should be able
+to view it with image viewers like "xv" and "ImageMagick".  Sources of
+downloadable data are listed in the manual page.  There is a lot of useful
+information in the manual page; you might want to give it a read.
+In particular, there are some usage examples near the end of the page.
+
+
+
+Drawmap grew out of my curiosity regarding what could be done with the
+available USGS data files.  It was originally intended just to play
+with Digital Elevation Model (DEM) files.  Later I added processing
+for Digital Line Graph (DLG) files, GTOPO30 files, and Geographic Names
+Information System (GNIS) files.  Because of this history as an experimental
+testbed, drawmap was never actually designed.  Instead, it grew
+by accretion, with major sections being reworked from time to
+time as I decided to try new approaches.  There have even been a
+couple of top-to-bottom reworks, as I found it desirable to try a
+much different approach to one thing or another.  The addition of
+support for SDTS files considerably increased the complexity of the
+program.  Thus, if you are looking for elegant structure and logical
+design, you may be disappointed.  On the flip side, it is copiously
+commented, and you should find it reasonably straightforward to modify.
+Keeping straight all of the various translations and rotations of data
+can be mind-numbing; but that pretty much goes with the territory when
+you work on a program like this.
+
+Also, in order to minimize its memory footprint, drawmap uses malloc()
+to get the space it needs, and then uses pointers to access it.  I find
+the resulting code much more difficult to work with than code that
+simply allocates a 2-dimensional array and uses two index values to
+access the data.  Thus, if I plan to do a major rework on the code,
+I sometimes convert it back to using arrays, do what needs doing,
+and then convert it to using pointers.  Some people prefer to
+define macros that simulate arrays on the one-dimensional malloc()
+memory.  Others may prefer to define C++ objects to hide all of the
+messy details.  I like simplicity.  Your mileage may vary.
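A minimal sketch of the macro approach mentioned above (an illustration
only, not code taken from the drawmap sources; the names here are
hypothetical).  A single malloc() buffer is indexed in row-major order
as if it were a 2-dimensional array:

    /* ELEV() simulates elev[row][col] on a flat malloc() buffer. */
    #include <stdlib.h>

    #define ELEV(buf, ncols, row, col) ((buf)[(size_t)(row) * (ncols) + (col)])

    int
    fill_zero(long nrows, long ncols)
    {
        short *elev = malloc((size_t)nrows * (size_t)ncols * sizeof *elev);
        long i, j;

        if (elev == NULL)
            return -1;
        for (i = 0; i < nrows; i++)
            for (j = 0; j < ncols; j++)
                ELEV(elev, ncols, i, j) = 0;    /* row-major indexing */
        free(elev);
        return 0;
    }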
+
+I have a to-do list of things that would be nice to have.  Near the
+top of the list is to improve the handling of text.  The routine that
+adds text annotations to the image is very crude.  I cobbled it together
+one night because I needed the capability to add cursors and text in
+order to test out some algorithms, and I have never gone back to improve
+it.  Only two bitmapped fonts are available, and they can only be
+printed horizontally.  It would be super cool (and perhaps even swell)
+to have a bigger set of prettier fonts that could be scaled and rotated.
+This would be particularly nice for plotting hypsographic contour lines
+with the traditional elevation numbers printed along selected contours.
+(Although trying to do that sort of thing automatically would be a
+challenging task, indeed.)
+
+It would also be a good idea to try to make the thing portable.  At
+present, it has only been compiled (by me) under Linux.  Based on past
+experience, I assume that it will readily port to other Unix systems.
+However, porting it to Windows(TM) might not be fun.
+
+Another possibility would be to re-write it to be an X-Window
+application.  This would allow some interactive features to be added
+and would also provide instant access to a large set of fonts and
+drawing primitives.  However, there are already systems like that
+available, and I am not sure I want to produce yet another big,
+clunky, non-portable thingamabob that nobody uses because they
+can't get the thing to compile on their machines.  I know how
+disappointed I am when I go to the trouble to download something,
+and then find out I need some large support package before I can
+use the software.
+
+If you like the kind of resolution available in a 7.5-minute
+topographic map, you might want to look into the availability of
+DRG files.  DRG files are scanned-in versions of the standard
+USGS 7.5-minute map sheets.  The USGS sells these files on CD-ROM,
+but they are also available on-line for some locales.  For example,
+the state of Montana has a server providing all of the available
+DRG files for Montana.  In fact, a number of states provide such
+a service.  There are also a few sites that aren't tied to particular
+states, but have a smorgasbord of files from various locations.
+DRG files have no connection to drawmap.  I mention them here simply
+because you might find them of interest.
+
+I hope you find drawmap both useful and fun.  Remember though, that it
+is basically an experimental testbed, and not a commercial product.
+Use it at your own risk.  Drawmap, and all associated software and
+documentation, are provided "as is", and are licensed under the GNU
+General Public License.  See the file "COPYING" for license details.
+
+Fred M. Erickson
+603 15th Street
+Havre, MT  59501-5339
+
+fme at ttc-cmc.net
+

Added: packages/drawmap/branches/upstream/current/WHATS_NEW
===================================================================
--- packages/drawmap/branches/upstream/current/WHATS_NEW	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/WHATS_NEW	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,371 @@
+
+
+Aug  2  2001:
+Changes in drawmap between version 2.4 and version 2.5:
+
++ Version 2.4 used a relatively simple interpolation algorithm to map
+  24K DEM data into the map image.  It didn't warp the data blocks to
+  completely fill the corresponding latitude/longitude squares.
+  Version 2.5 uses a more sophisticated algorithm, which does warp
+  the data, and consequently produces better data registration at the
+  seams between data blocks, along with more accurate placement of data
+  points.
+
+  The "LIMITS" section, at the end of the drawmap manual page, has
+  been modified to reflect this change.
+
++ User-specified attribute restrictions were being ignored for
+  SDTS DLG files.  They should no longer be ignored.  (Thanks
+  to BM for reporting this problem.)
+
++ Some 7.5 minute DEM files use SDTS conventions that are different
+  from those of the bulk of the files I have examined.  For example,
+  while it appears that most files specify a sequence of (x,y) coordinates
+  as an ISO 8211 2-dimensional cartesian array, some files specify them
+  as simple repeating pairs.  I am not sure that this particular construct
+  is standards-compliant, but I have modified drawmap to try to handle it
+  anyway.  (Thanks to TF for reporting this problem.)
+
+  There are also cases where, for example, data items are stored as integers
+  (in ASCII decimal, or straight binary) for one quad, while the same items
+  are stored as real numbers for another.  This sort of thing is allowed under
+  the standard.  (Under the standard, there are six acceptable ways to store
+  elevations.)  Drawmap doesn't try to handle every possible format variation.
+  It does try to handle the ones that I have sample files for.  I made several
+  changes in this release to support additional subfield formats.  However,
+  files that drawmap cannot handle may still be lurking out there.
+  
++ Due to floating point rounding errors, sdts2dem (and possibly sdts2dlg)
+  did not always determine correct output file names.  This has (hopefully)
+  been fixed.  It is a bit difficult to be sure, because I don't know of
+  any firm limits on the values that can appear in an SDTS DEM or DLG file,
+  and the SDTS files themselves may contain errors.  In any case, the
+  file-name generation code has been made more robust.
+
++ Some 7.5 minute DEM files appear to contain 32767 or -32767 as markers
+  for non-valid data.  Drawmap was unaware of this and treated the values
+  as valid elevations.  Such values are now discarded.
+
++ Moved the comment blocks to the ends of the manual pages.
+  For some reason, having them at the beginning caused a
+  whole bunch of blank pages to be output at the beginning
+  of a formatted page.
+
++ Improved handling of return values in get_a_line().  As far as
+  I know, the unimproved routine works okay with drawmap; but
+  the improvements should make get_a_line() more robust for
+  people who want to use the routine in their own software.
+
++ Made a couple of minor changes to make drawmap more portable.
+
++ Fixed some minor miscellaneous problems.
+
+
+
+
+
+
+Mar 26  2001:
+Changes in drawmap between version 2.3 and version 2.4:
+
++ Drawmap can now handle DEM files with elevations in feet.
+
++ Some clarifications have been added to the manual page,
+  including an example of the use of GTOPO30 files.
+
++ There is some additional installation information in the
+  README file.
+
+
+
+
+
+
+Jan 16  2001:
+Changes in drawmap between version 2.2 and version 2.3:
+
++ This point release is partly the result of downloading more
+  test data for more parts of the USA, and fixing whatever new
+  problems might show up.  There are, however, several new
+  options and general modifications, and also a few bug fixes.
+  These are described below, although some of the minor bug
+  fixes are omitted.
+
++ Added the -r option to allow the sharpness of the shaded
+  relief to be varied.  The default is fully-shaded relief,
+  which was the only available shading in previous releases.
+  The new option allows the darkness of shadows to be
+  reduced, in steps, until all shading disappears and only
+  simple color bands remain.
+
++ Added the -z option, which adjusts the color table so that
+  it exactly spans the range of elevations in the given
+  data.  This ensures that all available colors are used in
+  the map; but at the cost of having each color represent
+  a peculiar range of elevations.
+
++ Added the -m option, which allows limited enhancement
+  of light/dark shading in shaded-relief maps.
+
++ Added the -t option, to shut off generation of tick
+  marks and latitude/longitude markings.
+
++ In response to a request, added the -C option, which works
+  the same as the -c option, except that it fills in the
+  space between contour lines with solid colors.
+
++ In past releases, the -c option located contour lines in
+  the centers of the elevation bands.  In other words, if
+  the contour interval was 100 meters, then the first contour
+  would be at 50 meters, with the next at 150 meters.
+  In this release the code has been modified so that contour
+  lines are on the edges of the elevation bands.  In other
+  words, if the contour interval is 100 meters, then the
+  contours are at 100 meters, 200 meters, and so on.
+
++ A few of the GTOPO30 files were rejected because quantization
+  error caused some consistency checking to fail.  Loosened
+  up the checks slightly so that the files now pass muster.
+
++ Updated the color scheme for drawing DLG vector data.
+  Changed railroads and pipelines to black, and changed
+  vegetative features to green.
+
++ Added support for more attribute types, including
+  190/UNPAVED.  Fixed a few attributes that have special
+  cases, such as range and township numbers that can
+  take forms like "10 1/2E".
+
++ Some USGS SDTS DLG files have attributes whose record
+  IDs double back and duplicate other record IDs in the file.
+  (Wilmington Delaware 24K DLG HY 1379127.HY.sdts.tar.gz
+  and 1379151.HY.sdts.tar.gz are examples.)  I would guess
+  that this is a bug in the SDTS files, since it would leave
+  no way to distinguish between attributes.  (One could use
+  the record number in the ISO 8211 header, but I don't
+  think this is considered legitimate under the standard.)
+  One way or the other, this was causing drawmap some
+  indigestion, so the software has been fixed to deal with
+  the problem as best it can.  Drawmap prints a warning
+  message when this situation occurs.
+
++ Beefed up the test program, tacked onto the end of
+  sdts_utils.c, so that it is a useful viewing tool
+  for SDTS files.
+
+
+
+
+
+
+Aug  1  2000:
+Changes in drawmap between version 2.1 and version 2.2:
+
++ Added the sdts2dem and sdts2dlg programs to convert
+  SDTS transfers into the `classic' DEM and `optional'
+  DLG formats.  I wrote these primarily to allow me
+  to test drawmap's SDTS capabilities, but they should
+  be generally useful.
+
++ Improved the handling of SDTS files.  In particular,
+  we no longer ignore some of the minor attribute files,
+  so all attributes should be included.  Improved the
+  handling of attributes overall.  This includes
+  specific support for attributes with special needs.
+
++ There were a number of minor repairs and upgrades here
+  and there.
+  
++ Decreased the default memory footprint a bit, by changing
+  some long variables to shorts in some of the larger
+  arrays.  Unfortunately, new feature development probably
+  devoured a lot of these savings.
+  
++ Modified the -h option to automatically include a blue
+  surface at sea level.
+  
++ Modified the -i option to include the DLG name, and the
+  type of data in the file, when printing info about DLG files.
+
+
+
+
+
+Jul 23  2000:
+Changes in drawmap between version 2.0 and version 2.1:
+
++ Added support for the NAD-83 datum and the WGS-84 datum.
+
++ Drawmap can now read SDTS-format DEM and DLG files.
+
++ At the request of a drawmap user, I added GTOPO30
+  files to the set of files drawmap can read.  This should
+  provide at least some map-drawing capability for people
+  who can't get DEM data for their countries of interest.
+
++ There were a number of minor repairs and upgrades here
+  and there.
+
++ Changed the names of block_dem and block_dlg to unblock_dem
+  and unblock_dlg so that they are consistent with the
+  equivalent "dd" commands.
+
+
+
+
+
+Jul  8  2000:
+Changes in drawmap between version 1.10 and version 2.0:
+
++ This was a fairly major restructuring of drawmap, mainly to
+  get it to handle DEM and DLG data at the 7.5-minute
+  resolution.
+
++ In past versions of drawmap, data smoothing was done when
+  the image resolution and data resolution were the same.
+  This version of drawmap no longer smooths when the source
+  data and target image have close to the same resolution.
+  In addition, the code implementing smoothing has been
+  clarified, so that the smoothing behavior should be easier
+  to understand and modify.
+
++ Restructured the shaded relief code to allow it to handle
+  data from DEMs in various resolutions.  Also moved most of
+  the color definitions into colors.h so that users can more
+  easily modify the color scheme.  Provided four separate
+  color map tables, selectable by the "-n" option, so that
+  users can choose between a variety of color schemes.
+  "Natural" colors are now the default, rather than the
+  garish high-perception colors used in previous versions.
+
+  Also toned down the red and blue that are used to draw
+  streams and roads.  They were a bit distracting.
+
++ Added a new "-w" option to allow flat sea-level areas to be
+  automatically colored blue.
+
++ Removed the requirement that the x and y dimensions be even.
+  This was done primarily because 7.5-minute DEMs often have
+  odd dimensions.
+
++ Drawmap no longer tries to create an image file until after
+  most error checking is done.  This should reduce the incidence
+  of half-created image files being left behind after an error.
+
++ Drawmap now uses Redfearn's formulas to convert UTM coordinates
+  to latitude/longitude coordinates and vice versa.  However,
+  the program still uses linear interpolation from the corners
+  of the data blocks to transfer DEM data into the map image.
+
++ There are two new stand-alone commands, utm2ll and ll2utm that
+  convert latitude/longitude coordinates to UTM coordinates and
+  vice versa.  They currently only support the NAD-27 datum,
+  but can be easily modified to use others.
+
++ The dem_dlg_info program has been deleted.  It is replaced by
+  the "-i" option to drawmap.
+
++ The height-field program in the height_field subdirectory has
+  been deleted.  It is replaced by the "-h" option to drawmap.
+  The code that tried to generate "povray" objects for roads,
+  lakes, towns, and so on, is no longer present in any form.
+  It produced crude results and didn't seem very useful.
+
+
+
+
+
+Jun 18  2000:
+Changes in drawmap between version 1.9 and version 1.10:
+
++ The USGS has changed the format of both types of GNIS
+  files.  Modified drawmap to handle the new formats.
+  NOTE:  Files in the old formats will no longer work.
+
++ Cleaned up a few minor things here and there.
+
+
+
+
+
+Jun  3  2000:
+Changes in drawmap between version 1.8 and version 1.9:
+
++ Some of the newer USGS 100K DLG files contain newlines.
+  (Older ones did not.)
+  Modified drawmap to handle this new wrinkle.
+
++ Provided a new program called dem_dlg_info, to print
+  out some useful information from the headers of DEM
+  and DLG files.
+
++ Dropped the block_std program, since standard-format
+  files have disappeared from the USGS web site.
+
++ Added manual pages for block_opt and dem_dlg_info.
+
+
+
+
+
+Jan  3  2000:
+Changes in drawmap between version 1.7 and version 1.8:
+
++ Clarified the code comments and the manual page for drawmap.
+
++ Included the povray_height_field directory, which contains
+  a modified version of drawmap that will produce information
+  that the ``povray'' package can use to make 3-dimensional
+  renderings of terrain.
+
++ There were no changes in the operation of drawmap --- only
+  changes in the code comments.  The primary purpose of this
+  release was to add the drawmap_height_field program.
+
+
+
+
+
+Jan 16  1999:
+Changes in drawmap between version 1.6 and version 1.7:
+
++ removed restrictions on the size of contouring intervals
+
++ added code to allow use on big-endian machines
+
++ added Gaussian smoothing for oversampled maps to reduce checkerboard effect
+
++ changed DLG line-drawing algorithm to produce better map registration
+
++ changed drawmap and llsearch to handle the new GNIS format
+
+
+
+
+
+Dec 11  1998:
+Changes in drawmap between version 1.3 and version 1.6:
+
++ Fixed a portability bug, and an obscure bug in the code that
+  draws tick marks at the edges of the image.
+
++ Added some new information about licensing (see the file "COPYING_NOTE").
+
++ Added a new option "-c", that draws contour lines instead of the normal
+  shaded relief.
+
+
+
+
+
+Oct 12  1998:
+Changes in drawmap between version 1.3 and version 1.5
+(there was no version 1.4):
+
++ Fixed a portability bug, and an obscure bug in the code that
+  draws tick marks at the edges of the image.
+
++ Added some new information about licensing (see the file "COPYING_NOTE").
+
++ Added a new option "-c", that draws contour lines instead of the normal
+  shaded relief.

Added: packages/drawmap/branches/upstream/current/attrib_codes
===================================================================
--- packages/drawmap/branches/upstream/current/attrib_codes	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/attrib_codes	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,946 @@
+NOTE:  THIS IS AN EXTRACT FROM A USGS DOCUMENT.  THE ORIGINAL DOCUMENT WAS OBTAINED FROM A USGS WEB SITE.
+
+
+
+
+              APPENDIX D.--DLG Attribute Codes
+
+
+
+Valid Minor Codes for the Coincident Feature Parameter
+
+    Code         Base Category
+
+    0002         Hypsography
+
+    0005         Hydrography
+
+    0007         Surface Cover
+
+    0009         Boundary
+
+    0015         Survey Control
+
+                 Transportation Systems
+
+    0017         Roads and Trails
+
+    0018         Railroads
+
+    0019         Pipelines, Transmission Lines, Miscellaneous
+                   Transportation
+
+    0020         Manmade Features
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Hypsography               Feature identification    Nodes                    020     NONE    
+
+                                                    Areas                    020     0100    Void area
+
+                                                    Lines                    020     0200    Contour (index or intermediate)
+                                                                                     0201    Carrying contour
+                                                                                     0202    Supplementary contour
+                                                                                     0203    Continuation contour
+                                                                                     0204    Amended contour
+                                                                                     0205    Bathymetric contour
+                                                                                    *0206    Depth curve
+                                                                                     0207    Watershed divides
+                                                                                     0208    Closure line
+
+                                                    Points                   020     0300    Spot elevation, less than
+                                                     (degenerate lines)                       third order,
+
+                                                                             020     0301    Spot elevation, less than
+                                                                                              third order, not at ground
+                                                                                              level.
+
+                                                    Multiple element         020     NONE    
+                                                     types
+
+                          Descriptive               Multiple element         020     0600-   Decimal fractions of feet
+                                                     types                           0609     or meters
+                                                                                     0610    Approximate
+                                                                                     0611    Depression
+                                                                                     0612    Glacier or snow field
+                                                                                     0613    Underwater
+                                                                                     0614    Best estimate of contour
+                                                                                              elevation value
+
+                                                                             020     0000    Photorevised feature
+__________________________________________________________________________________________________________________________________
+* denotes a code which is no longer being used to encode features, but which may appear in older files.
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Hypsography               Parameter                 Multiple element         02N     ----   Elevation in whole feet or
+ (cont'd.)                                           types                                   meters, right-justified
+                                                                             026     00--   Major category associated
+                                                                                             with a spot height, not at
+                                                                                             ground elevation.
+                                                                             029     00--   Coincident feature
+__________________________________________________________________________________________________________________________________
+Hydrography               Feature identification    Nodes                    050     0001   Upper origin of stream
+                                                                                     0002   Upper origin of stream at water body
+                                                                                     0003   Sink, channel no longer evident
+                                                                                     0004   Stream entering water body
+                                                                                     0005   Stream exiting water body
+
+                                                    Areas                    050     0100   Alkali flat
+                                                                                     0101   Reservoir 
+                                                                                     0102   Covered reservoir
+                                                                                     0103   Glacier or permanent snowfield
+                                                                                     0104   Salt evaporator
+                                                                                     0105   Inundation area
+                                                                                     0106   Fish hatchery or farm
+                                                                                     0107   Industrial water impoundment
+                                                                                     0108   Area to be submerged
+                                                                                     0109   Sewage disposal pond or 
+                                                                                             filtration beds
+                                                                                     0110   Tailings pond
+                                                                                     0111   Marsh, wetland, swamp, bog
+                                                                                     0112   Mangrove area
+                                                                                     0113   Rice field
+                                                                                     0114   Cranberry bog
+                                                                                     0115   Flats (tidal, mud, sand, gravel)
+                                                                                     0116   Bays, estuaries, gulfs, oceans, seas
+                                                                                     0117   Shoal 
+                                                                                     0118   Soda evaporator
+                                                                                     0119   Duck Pond
+                                                                                     0120   Void area
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Hydrography               Feature identification    Lines                    050     0200   Shoreline
+ (cont'd.)       (cont'd.)                                                           0201   Manmade shoreline
+                                                                                     0202   Closure line
+                                                                                     0203   Indefinite shoreline
+                                                                                     0204   Apparent limit
+                                                                                     0205   Outline of a Carolina bay
+                                                                                     0206   Danger Curve
+                                                                                     0207   Apparent shoreline
+
+                                                    Points                   050     0300   Spring
+                                                                                     0301   Non-flowing well
+                                                                                     0302   Flowing well
+                                                                                     0303   Riser
+                                                                                     0304   Geyser
+                                                                                     0305   Windmill
+                                                                                     0306   Cistern
+
+                                                    Multiple element         050     0400   Rapids
+                                                     types                           0401   Falls
+                                                                                     0402   Gravel pit or quarry filled with water
+                                                                                     0403   Gaging station
+                                                                                     0404   Pumping station
+                                                                                     0405   Water intake
+                                                                                     0406   Dam or weir
+                                                                                     0407   Canal lock or sluice gate
+                                                                                     0408   Spillway
+                                                                                     0409   Gate (flood, tidal, head, check)
+                                                                                     0410   Rock 
+                                                                                     0411   Crevasse
+                                                                                     0412   Stream
+                                                                                     0413   Braided stream
+                                                                                     0414   Ditch or canal
+                                                                                     0415   Aqueduct
+                                                                                     0416   Flume
+                                                                                     0417   Penstock
+                                                                                     0418   Siphon
+                                                                                     0419   Channel in water area
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Hydrography               Feature identification    Multiple element         050     0420   Wash or ephemeral drain
+ (cont'd.)                 (cont'd.)                 types (cont'd.)                 0421   Lake or pond
+                                                                                     0422   Coral reef
+                                                                                     0423   Sand in open water
+                                                                                     0424   Spoil area
+                                                                                     0425   Fish ladders
+                                                                                     0426   Holiday area
+
+                          Descriptive               Multiple element         050     0601   Underground
+                                                     types                           0602   Overpassing
+                                                                                     0603   Elevated
+                                                                                     0604   Tunnel
+                                                                                     0605   Right bank
+                                                                                     0606   Left bank
+                                                                                     0607   Under construction
+                                                                                     0608   Salt
+                                                                                     0609   Unsurveyed
+                                                                                     0610   Intermittent
+                                                                                     0611   Abandoned or discontinued
+                                                                                     0612   Submerged or sunken
+                                                                                    *0613   Wooded
+                                                                                     0614   Dry
+                                                                                     0615   Mineral or hot (sulphur, alkali, etc.)
+                                                                                     0616   Navigable, transportation
+                                                                                     0617   Underpassing
+                                                                                     0618   Earthen construction
+                                                                                     0619   Interpolated elevation
+                                                                                     0621-  Decimal fractions of feet or meters
+                                                                                     0629   
+
+                                                                             050     0000   Photorevised feature
+
+                          Parameter                 Multiple element         05N     ----   Water surface elevation, actual or
+                                                     types                                   interpolated, N=1 for feet, 
+                                                                                             2 for meters, 6 for feet below datum,
+                                                                                             and 7 for meters below datum.
+                                                                                             Elevation value in four spaces,
+                                                                                             right justified.
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Hydrography               Parameter (cont'd.)       Multiple element         053     0---   Angle of clockwise rotation (nearest
+ (cont'd.)                                           types (cont'd)                          whole degree)
+                                                                             055     ----   River mile, value in four spaces, 
+                                                                                             right justified
+                                                                             058     0000   Best estimate of classification
+                                                                                             or position 
+                                                                             059     00--   Coincident feature
+__________________________________________________________________________________________________________________________________
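+
+[Editor's note -- illustrative sketch, not part of the original USGS appendix
+ or of the drawmap sources.]  The hydrography parameter codes above encode a
+ numeric value in the four-character minor code, right justified, while the
+ last digit of the major code selects the units (05N: N = 1 for feet, 2 for
+ meters, 6 for feet below datum, 7 for meters below datum); the hypsography
+ 02N codes earlier in this appendix follow the same pattern.  The C sketch
+ below shows one way such a pair could be decoded; all function names here
+ are invented for the example.
+
+#include <stdio.h>
+#include <stdlib.h>
+
+/* Decode a 05N water-surface elevation attribute pair.  On success fill in
+ * the value and a unit label and return 0; return -1 for codes that do not
+ * carry an elevation. */
+static int decode_water_elevation(int major, const char *minor,
+                                  long *value, const char **units)
+{
+    static const char *unit_names[10] = {
+        NULL, "feet", "meters", NULL, NULL, NULL,
+        "feet below datum", "meters below datum", NULL, NULL
+    };
+    int n;
+
+    if (major < 50 || major > 59)       /* not a hydrography parameter code */
+        return -1;
+    n = major % 10;
+    if (unit_names[n] == NULL)          /* 050, 053, 055, 058, 059 carry no units */
+        return -1;
+    *units = unit_names[n];
+    *value = strtol(minor, NULL, 10);   /* four characters, right justified */
+    return 0;
+}
+
+int main(void)
+{
+    long v;
+    const char *u;
+
+    if (decode_water_elevation(52, " 347", &v, &u) == 0)
+        printf("water surface elevation: %ld %s\n", v, u);
+    return 0;
+}
+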
+Boundaries                Feature identification    Nodes                    090     0001   Monumented point on a boundary
+
+                                                    Areas                    090     0100   Civil township, district, precinct,
+                                                                                             or barrio 
+                                                                                     0101   Incorporated city, village, town,
+                                                                                             borough, or hamlet
+                                                                                     0103   National park, monument, lakeshore, 
+                                                                                             seashore, parkway, battlefield, or 
+                                                                                             recreation area
+                                                                                     0104   National forest or grassland
+                                                                                     0105   National wildlife refuge, game 
+                                                                                             preserve, or fish hatchery
+                                                                                     0106   National scenic waterway, riverway,
+                                                                                             wild and scenic river, or 
+                                                                                             wilderness area
+                                                                                     0107   Indian reservation
+                                                                                     0108   Military reservation
+                                                                                     0110   Federal prison
+                                                                                     0111   Miscellaneous Federal reservation
+                                                                                     0129   Miscellaneous State reservation
+                                                                                     0130   State park, recreation area,
+                                                                                             arboretum, or lake
+                                                                                     0131   State wildlife refuge, game preserve,
+                                                                                             or fish hatchery
+                                                                                     0132   State forest or grassland
+                                                                                     0133   State prison
+                                                                                     0134   County game preserve
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Boundaries (cont'd.)      Feature identification    Areas (cont'd.)          090     0150   Large park (city, county, or private)
+                           (cont'd.)                                                 0151   Small park (city, county, or private)
+                                                                                     0197   Canada
+                                                                                     0198   Mexico
+                                                                                     0199   Open water
+
+                                                    Lines                    090     0201   Indefinite (or approximate) boundary
+                                                                                     0202   Disputed boundary
+                                                                                     0203   Historical line
+                                                                                     0204   Boundary closure claim
+
+                                                    Points                   090     0301   Reference monuments for boundary
+                                                     (degenerate lines)                      points
+
+                                                    Multiple element         090     NONE
+                                                     types
+
+                          Descriptive               Multiple element         090     0000   Photorevised feature
+                                                     types
+
+                          Parameter                 Multiple element         091     00--   State FIPS code
+                                                     types                   092     0---   County or county equivalent FIPS code
+                                                                             095     ----   Monument number
+                                                                             096     XXYY   Alphabetic portion of any monument 
+                                                                                             number.  Substitute numeric equivalent
+                                                                                             of alphabetic for XX and for YY as
+                                                                                             follows:  00 = blank, 01 = A, 02 = B,
+                                                                                             03 = C, 04 = D, 05 = E, 06 = F,
+                                                                                             07 = G, 08 = H, 09 = I, 10 = J,
+                                                                                             11 = K, 12 = L, 13 = M, 14 = N,
+                                                                                             15 = O, 16 = P, 17 = Q, 18 = R,
+                                                                                             19 = S, 20 = T, 21 = U, 22 = V,
+                                                                                             23 = W, 24 = X, 25 = Y, 26 = Z.
+                                                                             098     0000   Best estimate of classification
+                                                                                             or position.
+                                                                             099     00--   Coincident feature
+
+
+
+
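+
+[Editor's note -- illustrative sketch, not part of the original USGS appendix
+ or of the drawmap sources.]  Attribute code 096 above stores the alphabetic
+ portion of a monument number as two two-digit fields, XX and YY, with 00
+ meaning blank and 01-26 standing for A-Z; code 177 in the road and trail
+ category later in this appendix uses the same scheme for route numbers.  A
+ minimal C sketch of that encoding follows; the function names are invented
+ for the example.
+
+#include <stdio.h>
+#include <ctype.h>
+
+/* Encode up to two letters as the four-digit XXYY minor code (00 = blank). */
+static int encode_xxyy(char a, char b)
+{
+    int xx = isalpha((unsigned char)a) ? toupper((unsigned char)a) - 'A' + 1 : 0;
+    int yy = isalpha((unsigned char)b) ? toupper((unsigned char)b) - 'A' + 1 : 0;
+    return xx * 100 + yy;
+}
+
+/* Decode an XXYY minor code back into two characters (0 maps to blank). */
+static void decode_xxyy(int minor, char *a, char *b)
+{
+    int xx = minor / 100, yy = minor % 100;
+
+    *a = (xx >= 1 && xx <= 26) ? (char)('A' + xx - 1) : ' ';
+    *b = (yy >= 1 && yy <= 26) ? (char)('A' + yy - 1) : ' ';
+}
+
+int main(void)
+{
+    char a, b;
+
+    printf("\"BM\" -> %04d\n", encode_xxyy('B', 'M'));   /* prints 0213 */
+    decode_xxyy(213, &a, &b);
+    printf("0213 -> \"%c%c\"\n", a, b);
+    return 0;
+}
+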
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Transportation, Roads,    Feature identification    Nodes                    170     0001   Bridge abutment
+  and Trails                                                                         0002   Tunnel portal
+                                                                                     0004   Gate
+                                                                                     0005   Cul-de-sac
+                                                                                     0006   Dead end
+                                                                                     0007   Drawbridge
+
+                                                                             170     0100   Void area
+
+                                 Lines                                       170     0201   Primary route, class 1, symbol
+                                                                                             undivided
+                                                                                     0202   Primary route, class 1, symbol
+                                                                                             divided by centerline
+                                                                                     0203   Primary route, class 1, divided,
+                                                                                             lanes separated
+                                                                                     0204   Primary route, class 1, one way, other
+                                                                                             than divided highway
+                                                                                     0205   Secondary route, class 2, symbol
+                                                                                             undivided 
+                                                                                     0206   Secondary route, class 2, symbol
+                                                                                             divided by centerline
+                                                                                     0207   Secondary route, class 2, symbol
+                                                                                             divided, lanes separated
+                                                                                     0208   Secondary route, class 2, one way,
+                                                                                             other than divided highway
+                                                                                     0209   Road or street, class 3
+                                                                                     0210   Road or street, class 4
+                                                                                     0211   Trail, class 5, other than four-wheel 
+                                                                                             drive vehicle
+                                                                                     0212   Trail, class 5, four-wheel-drive
+                                                                                             vehicle
+                                                                                     0213   Footbridge
+                                                                                     0214   Ferry crossing
+                                                                                     0215   Perimeter of parking area
+                                                                                     0216   Arbitrary extension of line (join or 
+                                                                                             closure)
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Transportation, Roads,    Feature identification    Lines (cont'd.)                  0217   Road or street, class 3, symbol
+  and trails (cont'd.)            (cont'd.)                                                  divided by centerline
+                                                                                     0218   Road or street, class 3, divided
+                                                                                             lanes separated
+                                                                                     0221   Road in street, class 3, one way
+                                                                                     0222   Road in transition
+
+                                                    Points                           NONE   
+                                                     (degenerate lines)
+
+                                                    Multiple element         170     0401   Traffic circle
+                                                     type                            0402   Cloverleaf or interchange
+                                                                                     0403   Toll gate, toll plaza or perimeter of
+                                                                                             toll plaza
+                                                                                     0404   Weigh station
+                                                                                     0405   Nonstandard section of road
+                                                                            *170     0600   Historical
+
+                          Descriptive               Multiple element         170     0601   In tunnel
+                                                     types                           0602   Overpassing, on bridge
+                                                                                     0603   Under construction, classification
+                                                                                             known
+                                                                                     0604   Under construction, classification
+                                                                                             unknown
+                                                                                     0605   Labeled "old railroad grade"
+                                                                                     0606   Submerged or in ford
+                                                                                     0607   Underpassing
+                                                                                    *0608   Limited access
+                                                                                     0609   Toll road
+                                                                                     0610   Privately operated or controlled
+                                                                                             public access
+                                                                                     0611   Proposed
+                                                                                     0612   Double-decked
+                                                                                     0613   In service facility or rest area
+                                                                                     0614   Elevated
+                                                                                     0615   Bypass route
+                                                                                     0616   Alternate route
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Transportation, Roads,    Descriptive               Multiple element         170     0617   Business route
+ and trails (cont'd.)      (cont'd.)                 types (cont'd.)                 0618   On drawbridge
+                                                                                     0619   Spur
+                                                                                     0620   Loop
+                                                                                     0621   Connector
+                                                                                     0622   Truck route
+                                                                                     0650   Road width 46-55 feet, 0.025 inches
+                                                                                             at 1:24,000
+                                                                                     0651   Road width 56-65 feet, 0.030 inches
+                                                                                             at 1:24,000
+                                                                                     0652   Road width 66-75 feet, 0.035 inches
+                                                                                             at 1:24,000
+                                                                                     0653   Road width 76-85 feet, 0.040 inches
+                                                                                             at 1:24,000
+                                                                                     0654   Road width 86-95 feet, 0.045 inches
+                                                                                             at 1:24,000
+                                                                                     0655   Road width 96-105 feet, 0.050 inches
+                                                                                             at 1:24,000
+                                                                                     0656   Road width 106-115 feet, 0.055 inches
+                                                                                             at 1:24,000
+                                                                                     0657   Road width 116-125 feet, 0.060 inches
+                                                                                             at 1:24,000
+                                                                                     0658   Road width 126-135 feet, 0.065 inches
+                                                                                             at 1:24,000
+                                                                                     0659   Road width 136-145 feet, 0.070 inches
+                                                                                             at 1:24,000
+
+                                                                             170     0000   Photorevised feature
+
+                          Parameter                 Multiple element         171     ----   Number of lanes
+                                                     types                   172     ----   Interstate route number
+                                                                             173     ----   U.S. route number
+                                                                             174     ----   State route number
+                                                                             175     ----   Reservation, park, or military route
+                                                                                             number 
+                                                                             176     ----   County route
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Transportation, Roads,    Parameter (cont'd.)       Multiple element         177     XXYY   Alphabetic portion of any route
+ and Trails (cont'd.)                                types (cont'd.)                         number.  Substitute numeric equiva-
+                                                                                             lent of alphabetic for XX and for YY
+                                                                                             as follows:  00 = blank, 01 = A,
+                                                                                             02 = B, 03 = C, 04 = D, 05 = E,
+                                                                                             06 = F, 07 = G, 08 = H, 09 = I,
+                                                                                             10 = J, 11 = K, 12 = L, 13 = M,
+                                                                                             14 = N, 15 = O, 16 = P, 17 = Q,
+                                                                                             18 = R, 19 = S, 20 = T, 21 = U,
+                                                                                             22 = V, 23 = W, 24 = X, 25 = Y,
+                                                                                             26 = Z.
+                                                                             178     0000   Best estimate of position or 
+                                                                                             classification
+                                                                             179     00--   Coincident feature
+__________________________________________________________________________________________________________________________________
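+
+[Editor's note -- illustrative sketch, not part of the original USGS appendix
+ or of the drawmap sources.]  The road-width codes 0650-0659 above follow a
+ simple arithmetic pattern: code 0650+n covers widths of (46+10n) through
+ (55+10n) feet and is symbolized at (0.025+0.005n) inches at 1:24,000.  The
+ small C helper below makes that explicit; the function name is invented for
+ the example.
+
+#include <stdio.h>
+
+/* Expand a road-width minor code (0650-0659) into its width range in feet
+ * and its symbol width in inches at 1:24,000; return 0 on success. */
+static int road_width_range(int minor, int *lo_ft, int *hi_ft, double *inches)
+{
+    int n;
+
+    if (minor < 650 || minor > 659)
+        return -1;
+    n = minor - 650;
+    *lo_ft = 46 + 10 * n;
+    *hi_ft = 55 + 10 * n;
+    *inches = 0.025 + 0.005 * n;
+    return 0;
+}
+
+int main(void)
+{
+    int lo, hi;
+    double in;
+
+    if (road_width_range(653, &lo, &hi, &in) == 0)
+        printf("code 0653: %d-%d feet, %.3f inches at 1:24,000\n", lo, hi, in);
+    return 0;
+}
+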
+Transportation,           Feature identification    Nodes                    180     0001   Bridge abutment
+  Railroads                                                                          0002   Tunnel portal
+                                                                                     0007   Drawbridge
+
+                                                    Areas                    180     0100   Void area
+
+                                                    Lines                    180     0201   Railroad
+                                                                                     0202   Railroad in street or road
+                                                                                     0204   Carline
+                                                                                     0205   Cog railroad, incline railway,
+                                                                                             logging tram
+                                                                                     0207   Ferry crossing
+                                                                                     0208   Railroad siding
+                                                                                     0209   Perimeter or limit of yard
+                                                                                     0210   Arbitrary line extension
+                                                                                     0211   Closure line
+
+                                                    Points                   180     NONE   
+                                                     (degenerate lines)
+
+                                                    Multiple element         180     0400   Railroad station, perimeter of
+                                                     types                                   station
+                                                                                     0401   Turntable
+                                                                                     0402   Roundhouse
+                                                                            *180     0600   Historical
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Transportation,           Descriptive               Multiple element         180     0601   In tunnel
+  Railroads (cont'd.)                                types                           0602   Overpassing, on bridge
+                                                                                     0603   Abandoned
+                                                                                     0604   Dismantled
+                                                                                     0605   Underpassing
+                                                                                     0606   Narrow gauge
+                                                                                     0607   In snowshed or under structure
+                                                                                     0608   Under construction
+                                                                                     0609   Elevated 
+                                                                                     0610   Rapid transit
+                                                                                     0611   On drawbridge
+                                                                                     0612   Private 
+                                                                                     0613   U.S. Government
+                                                                                     0614   Juxtaposition
+
+                                                                             180     0000   Photorevised feature
+                          Parameter                 Multiple element         181     ----   Number of tracks
+                                                     types                   188     0000   Best estimate of position or 
+                                                                                             classification
+                                                                             189     00--   Coincident feature
+__________________________________________________________________________________________________________________________________
+Transportation,           Feature identification    Nodes                    190     0001   End of transmission line at
+ Pipelines, Trans-                                                                           power station, substation, or
+ mission Lines,                                                                              hydroelectric plant
+ Miscellaneous Trans-                                                                0002   End of pipeline at oil or gas
+ portation Features                                                                          field
+                                                                                     0003   End of pipeline at refinery,
+                                                                                             depot, or tank farm
+
+                                                    Areas                    190     0100   Void area
+
+                                                    Lines                    190     0201   Pipeline
+                                                                                     0202   Power transmission line
+                                                                                     0203   Telephone or telegraph line
+                                                                                     0204   Aerial tramway, monorail, ski
+                                                                                             lift
+                                                                                     0205   Arbitrary line extension
+                                                                                     0206   Closure line
+__________________________________________________________________________________________________________________________________
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+Transportation,           Feature identification    Points                   190     0300   Seaplane anchorage
+ Pipelines, Trans-         (cont'd.)                 (degenerate lines)
+ mission Lines,
+ Miscellaneous Trans-                               Multiple element         190     0400   Power station
+ portation Features (cont'd.)                        types                           0401   Substation
+                                                                                     0402   Hydroelectric Plant
+                                                                                     0403   Landing strip, airport,
+                                                                                             perimeter of airport
+                                                                                     0404   Heliport, perimeter of heliport
+                                                                                     0405   Launch complex, perimeter of 
+                                                                                             launch complex
+                                                                                     0406   Pumping station (other than 
+                                                                                             water)
+                                                                                     0407   Seaplane ramp or landing area
+                                                                                     0408   Measuring station
+
+                          Descriptive               Multiple element         190     0600   Underground
+                                                     types                           0601   Under construction
+                                                                                     0602   Abandoned
+                                                                                     0603   Above ground
+                                                                                     0604   Labeled "closed"
+                                                                                     0605   Unimproved, loose surface
+                                                                                     0606   Submerged 
+                                                                                     0607   Nuclear
+
+                                                                             190     0000   Photorevised feature
+
+                          Parameters                Multiple element         193     0---   Angle of clockwise rotation
+                                                     types                                   (nearest whole degree)
+                                                                             198     0000   Best estimate of position or 
+                                                                                             classification
+                                                                             199     00--   Coincident feature
+__________________________________________________________________________________________________________________________________
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+U.S. Public Land          Feature identification    Nodes                    300     0001   U.S. Public Land Survey System
+ Survey System                                                                               section corner
+                                                                                     0002   Point on section line (no corner)
+                                                                                     0003   Closing corner
+                                                                                     0004   Meander corner
+                                                                                     0005   Auxiliary meander corner
+                                                                                     0006   Special meander corner
+                                                                                     0007   Witness corner
+                                                                                     0008   Witness point
+                                                                                     0009   Angle point
+                                                                                     0010   Location monument (includes amended
+                                                                                             monument and mineral monument)
+                                                                                     0011   Reference mark
+                                                                                     0012   Quarter-section corner
+                                                                                     0013   Tract corner
+                                                                                     0014   Land grant corner
+                                                                                     0015   Arbitrary section corner
+
+                                                    Nodes (identifi-         300     0040   Corner identified in field
+                                                     cation procedures)              0041   Corner with horizontal coordinates
+                                                                                     0042   Corner with elevation value
+
+                          Parameters                Areas                                   Select one parameter code from each of
+                                                                                             the following A, B, C, and D lists 
+                                                                                             and/or consult list E.
+
+                                                                                            A.  Origin of Survey
+
+                                                                             306     00--   Insert two-digit code from Appendix K.
+
+                                                                                            B.  Township number(s)
+
+                                                                             30-     ----   Insert 2 for north of the baseline or
+                                                                                             3 for south of the baseline in first
+                                                                                             space.  In the second space, insert
+                                                                                             a 0 for full township, 2 for 1/4
+                                                                                             township, 4 for 1/2 township, or 6
+                                                                                             for 3/4 township.  Insert township
+                                                                                             number in the last three spaces,
+                                                                                             right justified.
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+U.S. Public Land          Parameters                Areas (cont'd.)                         C.  Range number(s)
+ Survey System             (cont'd.)
+ (cont'd.)                                                                   30-     ----   Insert 4 for east of the principal 
+                                                                                             meridian or 5 for west of the
+                                                                                             principal meridian in the first
+                                                                                             space.  In the second space, insert
+                                                                                             a 0 for a full range, 2 for 1/4
+                                                                                             range, 4 for 1/2 range, 6 for 3/4
+                                                                                             range, 8 for duplicate to the north
+                                                                                             or east of the original township, or
+                                                                                             9 for triplicate to the north or
+                                                                                             east of the original township. 
+                                                                                             Insert range number in last three
+                                                                                             spaces, right justified.
+
+                                                                                            D.  Section number
+                                                                             301     ----   In the first space, insert 0 for 
+                                                                                             numeric section identifier, 1 for
+                                                                                             numeric portion of alphanumeric 
+                                                                                             identifier or 2 for alphabetic part
+                                                                                             of alphanumeric identifier.  In the
+                                                                                             last three spaces, insert section
+                                                                                             number or numeric representation of 
+                                                                                             alphabetic character (01-26), right
+                                                                                             justified.
+
+                                                                                            E.  Land grant identifier
+                                                                             307     ----   In the first space, insert the 
+                                                                                             appropriate number:
+
+                                                                                        0   for numeric grant identifier
+                                                                                        1   for numeric portion of alphanumeric
+                                                                                             identifier
+                                                                                        2   for alphabetic portion of
+                                                                                             alphanumeric identifier
+                                                                                        3   for alphabetic identifier
+                                                                                        4   for identifier of named grant in
+                                                                                             Arizona
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+U.S. Public Land          Parameters                Areas (cont'd.)                     5   for identifier of named grant in 
+ Survey system             (cont'd.)                                                         California
+ (cont'd.)                                                                              6   for identifier of named grant in 
+                                                                                             Colorado
+                                                                                        7   for identifier of named grant in New
+                                                                                             Mexico
+                                                                                        8   for identifier of named grant in
+                                                                                             other States
+                                                                                            In the last three spaces after 0-3
+                                                                                             above, insert the grant number or
+                                                                                             numeric representation of the 
+                                                                                             alphabetic character (01-26),
+                                                                                             right-justified.  In the last three
+                                                                                             spaces after 4-7 above, insert the
+                                                                                             three-digit code of the named grant
+                                                                                             as designated in Appendix L.
+
+                                                                                            F.  Excluded areas
+                                                                             301     0100   Indian lands
+                                                                                     0101   Homestead entries
+                                                                                     0102   Donation land claims
+                                                                                     0103   Land grants; civil colonies
+                                                                                     0104   Private extension of public land
+                                                                                             survey
+                                                                                     0105   Area of public and private survey
+                                                                                             overlap
+                                                                                     0106   Overlapping land grants
+                                                                                     0107   Military reservation
+                                                                                     0198   Water
+                                                                                     0199   Unsurveyed area
+
+                          Feature identification    Lines                    300     0201   Approximate position (within 200 feet)
+                                                                                     0202   Protracted position
+                                                                                     0203   Arbitrary closure line
+                                                                                     0204   Base line
+                                                                                     0205   Claim line, grant line
+
+                                                    Points                   0300    0300   Location monument
+                                                     (degenerate lines)              0301   Isolated found section corner
+                                                                                     0302   Witness corner (off surveyed line)
+
+
+
+
+                     APPENDIX D.--DLG Attribute Codes--continued
+__________________________________________________________________________________________________________________________________
+                                                                            MAJOR   MINOR
+DATA CATEGORY             TYPE OF CODE              APPLICATION             CODE    CODE                  DESCRIPTION
+__________________________________________________________________________________________________________________________________
+U.S. Public Land          Parameter                 Multiple element         308     0000   Best estimate of classification and/or
+ Survey System                                       types                                   position
+ (cont'd.)                                                                   309     00--   Coincident feature or symbol
+__________________________________________________________________________________________________________________________________
+* denotes a code which is no longer being used to encode features, but which may appear in older files.
+
+
+
+
+
+              APPENDIX G.--Sample DLG Data File
+                (Optional Distribution Format)
+
+
+
+CARTERSVILLE, GA 01                      1981,        100000.  S01
+RO4.RDS01
+     3     1    16     2 0.25400000000D+01     4     0     4     1
+  -0.840560150000038D+08   0.340260150000010D+08   0.000000000000000D+00
+   0.000000000000000D+00   0.000000000000000D+00   0.000000000000000D+00
+   0.000000000000000D+00   0.000000000000000D+00   0.000000000000000D+00
+   0.000000000000000D+00   0.000000000000000D+00   0.000000000000000D+00
+   0.000000000000000D+00   0.000000000000000D+00   0.000000000000000D+00
+ 0.10000000000D+01 0.00000000000D+00 0.00000000000D+00 0.00000000000D+00
+SW       34.375000  -85.000000         683898.58  3805355.05
+NW       34.500000  -85.000000         683624.37  3819215.69
+NE       34.500000  -84.875000         695100.28  3819449.04
+SE       34.375000  -84.875000         695392.26  3805588.76
+ROADS AND TRAILS       0   551   551 010   198   198 011   747   747   1
+N    1   695392.26  3805588.76           2           0     0
+  -724   725
+N    2   685742.29  3819258.76           3           0     0
+     1    -2   -87
+N    3   684845.86  3819240.53           3           0     0
+    -1   -23  -258
+N    4   687098.38  3819286.33           3           0     0
+     2    -3   -34
+N    5   687347.30  3819288.85           3           0     0
+     3    -4   -35
+N    6   687509.77  3819294.70           3           0     0
+     4    -5   -42
+N    7   687735.78  3819299.29           3           0     0
+     5    -6   -43
+N    8   687842.44  3819301.46           3           0     0
+     6    -7   -45
+N    9   687969.42  3819304.04           3           0     0
+     7    -8   -67
+N   10   688042.96  3819310.62           3           0     0
+     8    -9   -66
+N   11   688129.40  3819307.29           3           0     0
+     9   -10   -80
+N   12   688462.08  3819314.06           3           0     0
+    10   -11   -31
+N   13   688632.22  3819317.52           3           0     0
+    11   -12   -32
+N   14   688837.92  3819321.70           3           0     0
+    12   -13   -24
+N   15   688947.12  3819323.92           3           0     0
+    13   -14   -26
+N   16   689490.57  3819334.97           3           0     0
+    14   -15  -154
+
+
+
+
+              APPENDIX G.--Sample DLG Data File
+          (Optional Distribution Format)--continued
+
+
+A    1   689504.51  3812402.15          61    65     1     0     0
+  -695  -701  -705  -706  -708  -710  -713  -714  -716  -718  -719  -722
+  -723   724   725   693   692   645   576   554   553   545   536   512
+   483   407   244   218   146   113    94    21    20    19    18    17
+    16    15    14    13    12    11    10     9     8     7     6     5
+     4     3     2     1  -258  -399  -464  -509  -543  -547  -557  -620
+  -689
+     0     0
+A    2   689504.51  3812402.15           3     4     0     0     0
+   726  -725  -724
+A    3   689504.51  3812402.15          10    32     0     0     0
+    87    -2   -34   -40    41   -49   -48   -74  -105   104
+A    4   689504.51  3812402.15           8    25     0     0     0
+   136    23    -1   -87  -137  -139  -141   140
+A    5   689504.51  3812402.15           5    28     0     0     0
+  -259   258   -23  -136  -237
+A    6   689504.51  3812402.15           3     8     0     0     0
+    34    -3   -35
+A    7   689504.51  3812402.15           5     9     0     0     0
+    59    35    -4   -42   -58
+A    8   689504.51  3812402.15           4     6     0     0     0
+    42    -5   -43    44
+A    9   689504.51  3812402.15           4     5     0     0     0
+    43    -6   -45    46
+A   10   689504.51  3812402.15           4     8     0     0     0
+    45    -7   -67    70
+A   11   689504.51  3812402.15           4     6     0     0     0
+    67    -8   -66   -68
+A   12   689504.51  3812402.15           5     6     0     0     0
+    66    -9   -80    79    78
+A   13   689504.51  3812402.15          17    34     0     0     1
+   106   -79    80   -10   -31   -37  -111  -110  -115  -117  -119  -121
+   122     0    84    85   -83
+A   14   689504.51  3812402.15           3     8     0     0     0
+    31   -11   -32
+A   15   689504.51  3812402.15           5     9     0     0     0
+    39    32   -12   -24   -38
+A   16   689504.51  3812402.15           4     6     0     0     0
+    24   -13   -26    25
+L    1     2     3     4     1                 2     0     0
+   685742.29  3819258.76   684845.86  3819240.53
+L    2     4     2     3     1                 2     0     0
+   687098.38  3819286.33   685742.29  3819258.76
+L    3     5     4     6     1                 2     0     0
+   687347.30  3819288.85   687098.38  3819286.33
+L    4     6     5     7     1                 2     0     0
+   687509.77  3819294.70   687347.30  3819288.85
+L    5     7     6     8     1                 2     0     0
+   687735.78  3819299.29   687509.77  3819294.70
+
+
+
+
+              APPENDIX G.--Sample DLG Data File
+          (Optional Distribution Format)--continued
+
+
+L    6     8     7     9     1                 2     0     0
+   687842.44  3819301.46   687735.78  3819299.29
+L    7     9     8    10     1                 2     0     0
+   687969.42  3819304.04   687842.44  3819301.46
+L    8    10     9    11     1                 2     0     0
+   688042.96  3819310.62   687969.42  3819304.04
+L    9    11    10    12     1                 2     0     0
+   688129.40  3819307.29   688042.96  3819310.62
+L   10    12    11    13     1                 2     0     0
+   688462.08  3819314.06   688129.40  3819307.29
+L   11    13    12    14     1                 2     0     0
+   688632.22  3819317.52   688462.08  3819314.06
+L   12    14    13    15     1                 2     0     0
+   688837.92  3819321.70   688632.22  3819317.52
+L   13    15    14    16     1                 2     0     0
+   688947.12  3819323.92   688837.92  3819321.70
+L   14    16    15    17     1                 2     0     0
+   689490.57  3819334.97   688947.12  3819323.92
+L   15    17    16    18     1                 2     0     0
+   690620.63  3819357.95   689490.57  3819334.97
+L   16    18    17    18     1                 2     0     0
+   690925.37  3819364.15   690620.63  3819357.95
+L   17    19    18    19     1                 2     0     0
+   691189.48  3819369.52   690925.37  3819364.15
+L   18    20    19    20     1                 2     0     0
+   691605.95  3819377.98   691189.48  3819369.52
+L   19    21    20    21     1                 2     0     0
+   693447.02  3819417.96   691605.95  3819377.98
+L   20    22    21    21     1                 2     0     0
+   694107.34  3819428.85   693447.02  3819417.96
+L   21    23    22    22     1                 2     0     0
+   694914.90  3819445.27   694107.34  3819428.85
+L   22    24    22    21    22                 3     1     0
+   694205.18  3819364.78   694194.61  3819384.89   694107.34  3819428.85
+   170   205
+L   23    25     3     5     4                 3     1     0
+   684825.58  3819113.09   684849.22  3819199.95   684845.86  3819240.53
+   170   209
+L   24    26    14    15    16                 2     1     0
+   688848.53  3819174.57   688837.92  3819321.70
+   170   210
+L   25    27    26    25    16                 2     1     0
+   688901.91  3819173.11   688848.53  3819174.57
+   170   210
+L   26    27    15    16    17                 3     1     0
+   688901.91  3819173.11   688939.85  3819181.50   688947.12  3819323.92
+   170   210
+L   27    28    17    18    18                 3     1     0
+   690593.10  3819212.58   690628.40  3819226.00   690620.63  3819357.95
+   170   209
+
+
+
+
+              APPENDIX G.--Sample DLG Data File
+          (Optional Distribution Format)--continued
+
+
+L   28    29    25     4     4                 3     1     0
+   685461.28  3819085.37   685414.90  3819117.45   684825.58  3819113.09
+   170   209
+L   29    30    24    24    22                 2     1     0
+   694605.90  3819273.85   694205.18  3819364.78
+   170   205
+L   30    30    23    22    23                 4     1     0
+   694605.90  3819273.85   694615.85  3819284.21   694851.82  3819423.66
+   694914.90  3819445.27
+   170   205
+L   31    31    12    13    14                 4     1     0
+   688529.54  3819119.81   688480.67  3819149.30   688470.15  3819166.87
+   688462.08  3819314.06
+   170   210
+L   32    31    13    14    15                 4     1     0
+   688529.54  3819119.81   688611.12  3819230.71   688637.82  3819292.23
+   688632.22  3819317.52
+   170   210
+L   33    32    21    21    21                 2     1     0
+   693453.44  3819227.55   693447.02  3819417.96
+   170   209
+L   34    33     4     3     6                 4     1     0
+   687148.37  3819076.48   687139.15  3819155.05   687123.40  3819180.14
+   687098.38  3819286.33
+   170   210
+L   35    33     5     6     7                 4     1     0
+   687148.37  3819076.48   687172.89  3819120.17   687342.68  3819265.89
+   687347.30  3819288.85
+   170   201

Added: packages/drawmap/branches/upstream/current/attributes
===================================================================
--- packages/drawmap/branches/upstream/current/attributes	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/attributes	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,700 @@
+# =========================================================================
+# attributes - A sample attribute file
+# Copyright (c) 1997,1998,1999,2000,2001  Fred M. Erickson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+# =========================================================================
+
+
+
+
+
+##################################################
+### Codes to select entire groups of features. ###
+##################################################
+# L 020 -1	all Hypsography
+L 050 -1	all Hydrography
+# L 090 -1	all Boundaries
+# L 170 -1	all Roads and Trails
+# L 180 -1	all Railroads
+# L 190 -1	all Pipelines, Transmission Lines, Miscellaneous Transportation Features
+# L 300 -1	all U.S. Public Land Survey System
+
+
+
+
+
+
+
+##################################################
+### Some useful codes for selecting common     ###
+### features on an individual basis.           ###
+##################################################
+#
+# Lines
+#
+L 170 201	primary route, class 1, symbol undivided
+L 170 202	primary route, class 1, symbol divided by centerline
+L 170 203	primary route, class 1, symbol divided, lanes separated
+L 170 204	primary route, class 1, symbol one way, other than divided highway
+# L 50 200	shoreline
+# L 50 201	manmade shoreline
+# L 50 202	closure line
+# L 50 406	dam or weir
+# L 50 412	stream
+# L 50 413	braided stream
+# L 50 414	ditch or canal
+# L 50 415	aqueduct
+# L 50 419	channel in water area
+# L 50 421	lake or pond
+# L 50 605	right bank
+# L 50 606	left bank
+# L 50 610	intermittent
+
+#
+# Areas
+#
+A 50 101	reservoir
+# A 50 408	spillway
+A 50 412	stream
+A 50 421	lake or pond
+
+
+
+
+
+
+
+################################################################################
+###  A big list of possible codes, included for completeness.  These were    ###
+###  generated by pulling all of the available codes out of the attribute    ###
+###  description document.  They are not all guaranteed to make sense, since ###
+###  they were generated by a mechanical process, but they may prove useful, ###
+###  nonetheless.  In any case, they should give you an idea of the kinds of ###
+###  attributes that can be defined in a DLG file.                           ###
+###                                                                          ###
+###  These are probably also somewhat incomplete and out of date.  If you    ###
+###  want to do a lot of work with attributes, it would be worthwhile to     ###
+###  get copies of the relevant USGS standards documents for a more          ###
+###  up-to-date description of the various attributes.                       ###
+################################################################################
+
+################################################################################
+#                               Hypsography                                    #
+################################################################################
+# A  20  100    Void area
+# A  20  600    Decimal fractions of feet or meters
+# A  20  601    Decimal fractions of feet or meters
+# A  20  602    Decimal fractions of feet or meters
+# A  20  603    Decimal fractions of feet or meters
+# A  20  604    Decimal fractions of feet or meters
+# A  20  605    Decimal fractions of feet or meters
+# A  20  606    Decimal fractions of feet or meters
+# A  20  607    Decimal fractions of feet or meters
+# A  20  608    Decimal fractions of feet or meters
+# A  20  609    Decimal fractions of feet or meters
+# A  20  610    Approximate
+# A  20  611    Depression
+# A  20  612    Glacier or snow field
+# A  20  613    Underwater
+# A  20  614    Best estimate of contour elevation value
+# A  20    0    Photorevised feature
+# L  20  200    Contour (index or intermediate)
+# L  20  201    Carrying contour
+# L  20  202    Supplementary contour
+# L  20  203    Continuation contour
+# L  20  204    Amended contour
+# L  20  205    Bathymetric contour
+# L  20  206    Depth curve
+# L  20  207    Watershed divides
+# L  20  208    Closure line
+# L  20  300    Spot elevation, less than third order
+# L  20  301    Spot elevation, less than third order, not at ground level.
+# L  20  600    Decimal fractions of feet or meters
+# L  20  601    Decimal fractions of feet or meters
+# L  20  602    Decimal fractions of feet or meters
+# L  20  603    Decimal fractions of feet or meters
+# L  20  604    Decimal fractions of feet or meters
+# L  20  605    Decimal fractions of feet or meters
+# L  20  606    Decimal fractions of feet or meters
+# L  20  607    Decimal fractions of feet or meters
+# L  20  608    Decimal fractions of feet or meters
+# L  20  609    Decimal fractions of feet or meters
+# L  20  610    Approximate
+# L  20  611    Depression
+# L  20  612    Glacier or snow field
+# L  20  613    Underwater
+# L  20  614    Best estimate of contour elevation value
+# L  20    0    Photorevised feature
+# L  2N ----   Elevation in whole feet or meters, right-justified
+# L  26 00--   Major category associated with a spot height, not at ground elevation.
+# L  29 00--   Coincident feature
+
+################################################################################
+#                               Hydrography                                    #
+################################################################################
+# N  50    1   Upper origin of stream
+# N  50    2   Upper origin of stream at water body
+# N  50    3   Sink, channel no longer evident
+# N  50    4   Stream entering water body
+# N  50    5   Stream exiting water body
+# A  50  100   Alkali flat
+# A  50  101   Reservoir 
+# A  50  102   Covered reservoir
+# A  50  103   Glacier or permanent snowfield
+# A  50  104   Salt evaporator
+# A  50  105   Inundation area
+# A  50  106   Fish hatchery or farm
+# A  50  107   Industrial water impoundment
+# A  50  108   Area to be submerged
+# A  50  109   Sewage disposal pond or filtration beds
+# A  50  110   Tailings pond
+# A  50  111   Marsh, wetland, swamp, bog
+# A  50  112   Mangrove area
+# A  50  113   Rice field
+# A  50  114   Cranberry bog
+# A  50  115   Flats (tidal, mud, sand, gravel)
+# A  50  116   Bays, estuaries, gulfs, oceans, seas
+# A  50  117   Shoal 
+# A  50  118   Soda evaporator
+# A  50  119   Duck Pond
+# A  50  120   Void area
+# A  50  400   Rapids
+# A  50  401   Falls
+# A  50  402   Gravel pit or quarry filled with water
+# A  50  403   Gaging station
+# A  50  404   Pumping station
+# A  50  405   Water intake
+# A  50  406   Dam or weir
+# A  50  407   Canal lock or sluice gate
+# A  50  408   Spillway
+# A  50  409   Gate (flood, tidal, head, check)
+# A  50  410   Rock 
+# A  50  411   Crevasse
+# A  50  412   Stream
+# A  50  413   Braided stream
+# A  50  414   Ditch or canal
+# A  50  415   Aqueduct
+# A  50  416   Flume
+# A  50  417   Penstock
+# A  50  418   Siphon
+# A  50  419   Channel in water area
+# A  50  420   Wash or ephemeral drain
+# A  50  421   Lake or pond
+# A  50  422   Coral reef
+# A  50  423   Sand in open water
+# A  50  424   Spoil area
+# A  50  425   Fish ladders
+# A  50  426   Holiday area
+# A  50  601   Underground
+# A  50  602   Overpassing
+# A  50  603   Elevated
+# A  50  604   Tunnel
+# A  50  605   Right bank
+# A  50  606   Left bank
+# A  50  607   Under construction
+# A  50  608   Salt
+# A  50  609   Unsurveyed
+# A  50  610   Intermittent
+# A  50  611   Abandoned or discontinued
+# A  50  612   Submerged or sunken
+# A  50  613   Wooded
+# A  50  614   Dry
+# A  50  615   Mineral or hot (sulphur, alkali, etc.)
+# A  50  616   Navigable, transportation
+# A  50  617   Underpassing
+# A  50  618   Earthen construction
+# A  50  619   Interpolated elevation
+# A  50  621   Decimal fractions of feet or meters
+# A  50  622   Decimal fractions of feet or meters
+# A  50  623   Decimal fractions of feet or meters
+# A  50  624   Decimal fractions of feet or meters
+# A  50  625   Decimal fractions of feet or meters
+# A  50  626   Decimal fractions of feet or meters
+# A  50  627   Decimal fractions of feet or meters
+# A  50  628   Decimal fractions of feet or meters
+# A  50  629   Decimal fractions of feet or meters
+# A  50    0   Photorevised feature
+# A  5N ----   Water surface elevation, actual or interpolated, N=1 for feet, 2 for meters,
+#              6 for feet below datum, and 7 for meters below datum.
+#              Elevation value in four spaces, right justified.
+# A  53 0---   Angle of clockwise rotation (nearest whole degree)
+# A  55 ----   River mile, value in four spaces, right justified
+# A  58 0000   Best estimate of classification or position 
+# A  59 00--   Coincident feature
+# L  50  200   Shoreline
+# L  50  201   Manmade shoreline
+# L  50  202   Closure line
+# L  50  203   Indefinite shoreline
+# L  50  204   Apparent limit
+# L  50  205   Outline of a Carolina bay
+# L  50  206   Danger Curve
+# L  50  207   Apparent shoreline
+# L  50  300   Spring
+# L  50  301   Non-flowing well
+# L  50  302   Flowing well
+# L  50  303   Riser
+# L  50  304   Geyser
+# L  50  305   Windmill
+# L  50  306   Cistern
+# L  50  400   Rapids
+# L  50  401   Falls
+# L  50  402   Gravel pit or quarry filled with water
+# L  50  403   Gaging station
+# L  50  404   Pumping station
+# L  50  405   Water intake
+# L  50  406   Dam or weir
+# L  50  407   Canal lock or sluice gate
+# L  50  408   Spillway
+# L  50  409   Gate (flood, tidal, head, check)
+# L  50  410   Rock 
+# L  50  411   Crevasse
+# L  50  412   Stream
+# L  50  413   Braided stream
+# L  50  414   Ditch or canal
+# L  50  415   Aqueduct
+# L  50  416   Flume
+# L  50  417   Penstock
+# L  50  418   Siphon
+# L  50  419   Channel in water area
+# L  50  420   Wash or ephemeral drain
+# L  50  421   Lake or pond
+# L  50  422   Coral reef
+# L  50  423   Sand in open water
+# L  50  424   Spoil area
+# L  50  425   Fish ladders
+# L  50  426   Holiday area
+# L  50  601   Underground
+# L  50  602   Overpassing
+# L  50  603   Elevated
+# L  50  604   Tunnel
+# L  50  605   Right bank
+# L  50  606   Left bank
+# L  50  607   Under construction
+# L  50  608   Salt
+# L  50  609   Unsurveyed
+# L  50  610   Intermittent
+# L  50  611   Abandoned or discontinued
+# L  50  612   Submerged or sunken
+# L  50  613   Wooded
+# L  50  614   Dry
+# L  50  615   Mineral or hot (sulphur, alkali, etc.)
+# L  50  616   Navigable, transportation
+# L  50  617   Underpassing
+# L  50  618   Earthen construction
+# L  50  619   Interpolated elevation
+# L  50  621   Decimal fractions of feet or meters
+# L  50  622   Decimal fractions of feet or meters
+# L  50  623   Decimal fractions of feet or meters
+# L  50  624   Decimal fractions of feet or meters
+# L  50  625   Decimal fractions of feet or meters
+# L  50  626   Decimal fractions of feet or meters
+# L  50  627   Decimal fractions of feet or meters
+# L  50  628   Decimal fractions of feet or meters
+# L  50  629   Decimal fractions of feet or meters
+# L  50 0000   Photorevised feature
+# L  5N ----   Water surface elevation, actual or interpolated, N=1 for feet, 2 for meters,
+#              6 for feet below datum, and 7 for meters below datum.
+#              Elevation value in four spaces, right justified.
+# L  53 0---   Angle of clockwise rotation (nearest whole degree)
+# L  55 ----   River mile, value in four spaces, right justified
+# L  58 0000   Best estimate of classification or position 
+# L  59 00--   Coincident feature
+
+################################################################################
+#                               Boundaries                                     #
+################################################################################
+# N  90    1   Monumented point on a boundary
+# A  90  100   Civil township, district, precinct, or barrio 
+# A  90  101   Incorporated city, village, town, borough, or hamlet
+# A  90  103   National park, monument, lakeshore, seashore, parkway, battlefield, or recreation area
+# A  90  104   National forest or grassland
+# A  90  105   National wildlife refuge, game preserve, or fish hatchery
+# A  90  106   National scenic waterway, riverway, wild and scenic river, or wilderness area
+# A  90  107   Indian reservation
+# A  90  108   Military reservation
+# A  90  110   Federal prison
+# A  90  111   Miscellaneous Federal reservation
+# A  90  129   Miscellaneous State reservation
+# A  90  130   State park, recreation area arboretum, or lake
+# A  90  131   State wildlife refuge, game preserve, or fish hatchery
+# A  90  132   State forest or grassland
+# A  90  133   State prison
+# A  90  134   County game preserve
+# A  90  150   Large park (city, county, or private)
+# A  90  151   Small park (city, county, or private)
+# A  90  197   Canada
+# A  90  198   Mexico
+# A  90  199   Open water
+# A  90 0000   Photorevised feature
+# A  91 00--   State FIPS code
+# A  92 0---   County or county equivalent FIPS code
+# A  95 ----   Monument number
+# A  96 XXYY   Alphabetic portion of any monument number.  Substitute numeric equivalent
+#              of alphabetic for XX and for YY as follows:
+#                00 blank, 01 = A, 02 = B, 03 = C, 04 = D, 05 = E, 06 = F,
+#                07 = G, 08 = H, 09 = I, 10 = J, 11 = K, 12 = L, 13 = M,
+#                14 = N, 15 = O, 16 = P, 17 = Q, 18 = R, 19 = S, 20 = T,
+#                21 = U, 22 = V, 23 = W, 24 = X, 25 = Y, 26 = Z.
+# A  98 0000   Best estimate of classification or position.
+# A  99 00--   Coincident feature
+# L  90  201   Indefinite (or approximate) boundary
+# L  90  202   Disputed boundary
+# L  90  203   Historical line
+# L  90  204   Boundary closure claim
+# L  90  301   Reference monuments for boundary points
+# L  90 0000   Photorevised feature
+# L  91 00--   State FIPS code
+# L  92 0---   County or county equivalent FIPS code
+# L  95 ----   Monument number
+# L  96 XXYY   Alphabetic portion of any monument number.  Substitute numeric equivalent
+#              of alphabetic for XX and for YY as follows:
+#                00 blank, 01 = A, 02 = B, 03 = C, 04 = D, 05 = E, 06 = F,
+#                07 = G, 08 = H, 09 = I, 10 = J, 11 = K, 12 = L, 13 = M,
+#                14 = N, 15 = O, 16 = P, 17 = Q, 18 = R, 19 = S, 20 = T,
+#                21 = U, 22 = V, 23 = W, 24 = X, 25 = Y, 26 = Z.
+# L  98 0000   Best estimate of classification or position.
+# L  99 00--   Coincident feature
+
+################################################################################
+#                   Transportation, Roads, and Trails                          #
+################################################################################
+# N 170    1   Bridge abutment
+# N 170    2   Tunnel portal
+# N 170    4   Gate
+# N 170    5   Cul-de-sac
+# N 170    6   Dead end
+# N 170    7   Drawbridge
+# A 170  100   Void area
+# A 170  401   Traffic circle
+# A 170  402   Cloverleaf or interchange
+# A 170  403   Toll gate, toll plaza or perimeter of toll plaza
+# A 170  404   Weigh station
+# A 170  405   Nonstandard section of road
+# A 170  600   Historical
+# A 170  601   In tunnel
+# A 170  602   Overpassing, on bridge
+# A 170  603   Under construction, classification known
+# A 170  604   Under construction, classification unknown
+# A 170  605   Labeled "old railroad grade"
+# A 170  606   Submerged or in ford
+# A 170  607   Underpassing
+# A 170  608   Limited access
+# A 170  609   Toll road
+# A 170  610   Privately operated or controlled public access
+# A 170  611   Proposed
+# A 170  612   Double-decked
+# A 170  613   In service facility or rest area
+# A 170  614   Elevated
+# A 170  615   Bypass route
+# A 170  616   Alternate route
+# A 170  617   Business route
+# A 170  618   On drawbridge
+# A 170  619   Spur
+# A 170  620   Loop
+# A 170  621   Connector
+# A 170  622   Truck route
+# A 170  650   Road width 46-55 feet, 0.025 inches at 1:24,000
+# A 170  651   Road width 56-65 feet, 0.030 inches at 1:24,000
+# A 170  652   Road width 66-75 feet, 0.035 inches at 1:24,000
+# A 170  653   Road width 76-85 feet, 0.040 inches at 1:24,000
+# A 170  654   Road width 86-95 feet, 0.045 inches at 1:24,000
+# A 170  655   Road width 96-105 feet, 0.050 inches at 1:24,000
+# A 170  656   Road width 106-115 feet, 0.055 inches at 1:24,000
+# A 170  657   Road width 116-125 feet, 0.060 inches at 1:24,000
+# A 170  658   Road width 126-135 feet, 0.065 inches at 1:24,000
+# A 170  659   Road width 136-145 feet, 0.070 inches at 1:24,000
+# A 170    0   Photorevised feature
+# A 171 ----   Number of lanes
+# A 172 ----   Interstate route number
+# A 173 ----   U.S. route number
+# A 174 ----   State route number
+# A 175 ----   Reservation, park, or military route number
+# A 176 ----   County route
+# A 177 XXYY   Alphabetic portion of any route number.  Substitute numeric equivalent
+#              of alphabetic for XX and for YY as follows:
+#                00 = blank, 01 = A, 02 = B, 03 = C, 04 = D, 05 = E, 06 = F,
+#                07 = G, 08 = H, 09 = I, 10 = J, 11 = K, 12 = L, 13 = M,
+#                14 = N, 15 = O, 16 = P, 17 = Q, 18 = R, 19 = S, 20 = T,
+#                21 = U, 22 = V, 23 = W, 24 = X, 25 = Y, 26 = Z.
+# A 178 0000   Best estimate of position or classification
+# A 179 00--   Coincident feature
+# L 170  201   Primary route, class 1, symbol undivided
+# L 170  202   Primary route, class 1, symbol divided by centerline
+# L 170  203   Primary route, class 1, divided, lanes separated
+# L 170  204   Primary route, class 1, one way, other than divided highway
+# L 170  205   Secondary route, class 2, symbol undivided 
+# L 170  206   Secondary route, class 2, symbol divided by centerline
+# L 170  207   Secondary route, class 2, symbol divided, lanes separated
+# L 170  208   Secondary route, class 2, one way, other than divided highway
+# L 170  209   Road or street, class 3
+# L 170  210   Road or street, class 4
+# L 170  211   Trail, class 5, other than four-wheel drive vehicle
+# L 170  212   Trail, class 5, four-wheel-drive vehicle
+# L 170  213   Footbridge
+# L 170  214   Ferry crossing
+# L 170  215   Perimeter of parking area
+# L 170  216   Arbitrary extension of line (join or closure)
+# L 170  217   Road or street, class 3, symbol and trails divided by centerline
+# L 170  218   Road or street, class 3, divided lanes separated
+# L 170  221   Road in street, class 3, one way
+# L 170  222   Road in transition
+# L 170  401   Traffic circle
+# L 170  402   Cloverleaf or interchange
+# L 170  403   Toll gate, toll plaza or perimeter of toll plaza
+# L 170  404   Weigh station
+# L 170  405   Nonstandard section of road
+# L 170  600   Historical
+# L 170  601   In tunnel
+# L 170  602   Overpassing, on bridge
+# L 170  603   Under construction, classification known
+# L 170  604   Under construction, classification unknown
+# L 170  605   Labeled "old railroad grade"
+# L 170  606   Submerged or in ford
+# L 170  607   Underpassing
+# L 170  608   Limited access
+# L 170  609   Toll road
+# L 170  610   Privately operated or controlled public access
+# L 170  611   Proposed
+# L 170  612   Double-decked
+# L 170  613   In service facility or rest area
+# L 170  614   Elevated
+# L 170  615   Bypass route
+# L 170  616   Alternate route
+# L 170  617   Business route
+# L 170  618   On drawbridge
+# L 170  619   Spur
+# L 170  620   Loop
+# L 170  621   Connector
+# L 170  622   Truck route
+# L 170  650   Road width 46-55 feet, 0.025 inches at 1:24,000
+# L 170  651   Road width 56-65 feet, 0.030 inches at 1:24,000
+# L 170  652   Road width 66-75 feet, 0.035 inches at 1:24,000
+# L 170  653   Road width 76-85 feet, 0.040 inches at 1:24,000
+# L 170  654   Road width 86-95 feet, 0.045 inches at 1:24,000
+# L 170  655   Road width 96-105 feet, 0.050 inches at 1:24,000
+# L 170  656   Road width 106-115 feet, 0.055 inches at 1:24,000
+# L 170  657   Road width 116-125 feet, 0.060 inches at 1:24,000
+# L 170  658   Road width 126-135 feet, 0.065 inches at 1:24,000
+# L 170  659   Road width 136-145 feet, 0.070 inches at 1:24,000
+# L 170 0000   Photorevised feature
+# L 171 ----   Number of lanes
+# L 172 ----   Interstate route number
+# L 173 ----   U.S. route number
+# L 174 ----   State route number
+# L 175 ----   Reservation, park, or military route number 
+# L 176 ----   County route
+# L 177 XXYY   Alphabetic portion of any route number.  Substitute numeric equivalent
+#              of alphabetic for XX and for YY as follows:
+#                00 = blank, 01 = A, 02 = B, 03 = C, 04 = D, 05 = E, 06 = F,
+#                07 = G, 08 = H, 09 = I, 10 = J, 11 = K, 12 = L, 13 = M,
+#                14 = N, 15 = O, 16 = P, 17 = Q, 18 = R, 19 = S, 20 = T,
+#                21 = U, 22 = V, 23 = W, 24 = X, 25 = Y, 26 = Z.
+# L 178 0000   Best estimate of position or classification
+# L 179 00--   Coincident feature
+
+################################################################################
+#                          Transportation, Railroads                           #
+################################################################################
+# N 180    1   Bridge abutment
+# N 180    2   Tunnel portal
+# N 180    7   Drawbridge
+# A 180  100   Void area
+# A 180  400   Railroad station, perimeter of station
+# A 180  401   Turntable
+# A 180  402   Roundhouse
+# A 180  600   Historical
+# A 180  601   In tunnel
+# A 180  602   Overpassing, on bridge
+# A 180  603   Abandoned
+# A 180  604   Dismantled
+# A 180  605   Underpassing
+# A 180  606   Narrow gauge
+# A 180  607   In snowshed or under structure
+# A 180  608   Under construction
+# A 180  609   Elevated 
+# A 180  610   Rapid transit
+# A 180  611   On drawbridge
+# A 180  612   Private 
+# A 180  613   U.S. Government
+# A 180  614   Juxtaposition
+# A 180 0000   Photorevised feature
+# A 181 ----   Number of tracks
+# A 188 0000   Best estimate of position or classification
+# A 189 00--   Coincident feature
+# L 180  201   Railroad
+# L 180  202   Railroad in street or road
+# L 180  204   Carline
+# L 180  205   Cog railroad, incline railway, logging tram
+# L 180  207   Ferry crossing
+# L 180  208   Railroad siding
+# L 180  209   Perimeter or limit of yard
+# L 180  210   Arbitrary line extension
+# L 180  211   Closure line
+# L 180  400   Railroad station, perimeter of station
+# L 180  401   Turntable
+# L 180  402   Roundhouse
+# L 180  600   Historical
+# L 180  601   In tunnel
+# L 180  602   Overpassing, on bridge
+# L 180  603   Abandoned
+# L 180  604   Dismantled
+# L 180  605   Underpassing
+# L 180  606   Narrow gauge
+# L 180  607   In snowshed or under structure
+# L 180  608   Under construction
+# L 180  609   Elevated 
+# L 180  610   Rapid transit
+# L 180  611   On drawbridge
+# L 180  612   Private 
+# L 180  613   U.S. Government
+# L 180  614   Juxtaposition
+# L 180 0000   Photorevised feature
+# L 181 ----   Number of tracks
+# L 188 0000   Best estimate of position or classification
+# L 189 00--   Coincident feature
+
+################################################################################
+#  Transportation, Pipelines, Transmission Lines, Misc Transportation Features #
+################################################################################
+# N 190    1   End of transmission line at power station, substation, or hydroelectric plant
+# N 190    2   End of pipeline at oil or gas field
+# N 190    3   End of pipeline at refinery, depot, or tank farm
+# A 190  100   Void area
+# A 190  400   Power station
+# A 190  401   Substation
+# A 190  402   Hydroelectric Plant
+# A 190  403   Landing strip, airport, perimeter of airport
+# A 190  404   Heliport, perimeter of heliport
+# A 190  405   Launch complex, perimeter of launch complex
+# A 190  406   Pumping station (other than water)
+# A 190  407   Seaplane ramp or landing area
+# A 190  408   Measuring station
+# A 190  600   Underground
+# A 190  601   Under construction
+# A 190  602   Abandoned
+# A 190  603   Above ground
+# A 190  604   Labeled "closed"
+# A 190  605   Unimproved, loose surface
+# A 190  606   Submerged 
+# A 190  607   Nuclear
+# A 190    0   Photorevised feature
+# A 193 0---   Angle of clockwise rotation types (nearest whole degree)
+# A 198 0000   Best estimate of position or classification
+# A 199 00--   Coincident feature
+# L 190  201   Pipeline
+# L 190  202   Power transmission line
+# L 190  203   Telephone or telegraph line
+# L 190  204   Aerial tramway, monorail, ski lift
+# L 190  205   Arbitrary line extension
+# L 190  206   Closure line
+# L 190  300   Seaplane anchorage
+# L 190  400   Power station
+# L 190  401   Substation
+# L 190  402   Hydroelectric Plant
+# L 190  403   Landing strip, airport, perimeter of airport
+# L 190  404   Heliport, perimeter of heliport
+# L 190  405   Launch complex, perimeter of launch complex
+# L 190  406   Pumping station (other than water)
+# L 190  407   Seaplane ramp or landing area
+# L 190  408   Measuring station
+# L 190  600   Underground
+# L 190  601   Under construction
+# L 190  602   Abandoned
+# L 190  603   Above ground
+# L 190  604   Labeled "closed"
+# L 190  605   Unimproved, loose surface
+# L 190  606   Submerged 
+# L 190  607   Nuclear
+# L 190    0   Photorevised feature
+# L 193 0---   Angle of clockwise rotation types (nearest whole degree)
+# L 198 0000   Best estimate of position or classification
+# L 199 00--   Coincident feature
+
+################################################################################
+#                       U.S. Public Land Survey System                         #
+################################################################################
+# N 300    1   U.S. Public Land Survey System section corner
+# N 300    2   Point on section line (no corner)
+# N 300    3   Closing corner
+# N 300    4   Meander corner
+# N 300    5   Auxiliary meander corner
+# N 300    6   Special meander corner
+# N 300    7   Witness corner
+# N 300    8   Witness point
+# N 300    9   Angle point
+# N 300   10   Location monument (includes amended monument and mineral monument)
+# N 300   11   Reference mark
+# N 300   12   Quarter-section corner
+# N 300   13   Tract corner
+# N 300   14   Land grant corner
+# N 300   15   Arbitrary section corner
+# N 300   40   Corner identified in field
+# N 300   41   Corner with horizontal coordinates
+# N 300   42     Corner with elevation value
+# A 306 00--   Origin of Survey:  Insert two-digit code from Appendix K.
+# A 30- ----   Township number(s):  Insert 2 for north of the baseline or 3 for south of the
+#              baseline in first space.  In the second space, insert a 0 for full township,
+#              2 for 1/4 township, 4 for 1/2 township, or 6 for 3/4 township.  Insert township
+#              number in the last three spaces, right justified.
+# A 30- ----   Range number(s):  Insert 4 for east of the principal meridian or 5 for west of
+#              the principal meridian in the first space.  In the second space, insert a 0
+#              for a full range, 2 for 1/4 range, 4 for 1/2 range, 6 for 3/4 range,
+#              8 for duplicate to the north or east of the original township, or 9 for
+#              triplicate to the north or east of the original township.
+#              Insert range number in last three spaces, right justified.
+# A 301 ----   Section number:  In the first space, insert 0 for numeric section identifier,
+#              1 for numeric portion of alphanumeric identifier or 2 for alphabetic part of
+#              alphanumeric identifier.  In the last three spaces, insert section number or
+#              numeric representation of alphabetic character (01-26), right justified.
+# A 307 ----   Land grant identifier:  In the first space, insert the appropriate number:
+#                0   for numeric grant identifier
+#                1   for numeric portion of alphanumeric identifier
+#                2   for alphabetic portion of alphanumeric identifier
+#                3   for alphabetic identifier
+#                4   for identifier of named grant in Arizona
+#                5   for identifier of named grant in California
+#                6   for identifier of named grant in Colorado
+#                7   for identifier of named grant in New Mexico
+#                8   For identifier of named grant in other States
+#              In the last three spaces after 0-3 above, insert the grant number or numeric
+#              representation of the alphabetic character (01-26), right-justified.  In the
+#              last three spaces after 4-7 above, insert the three-digit code of the named
+#              grant as designated in Appendix L.
+# A 301  100   Excluded areas:  Indian  lands
+# A 301  101   Excluded areas:  Homestead entries
+# A 301  102   Excluded areas:  Donation land claims
+# A 301  103   Excluded areas:  Land grants; civil colonies
+# A 301  104   Excluded areas:  Private extension of public land survey
+# A 301  105   Excluded areas:  Area of public and private survey overlap
+# A 301  106   Excluded areas:  Overlapping land grants
+# A 301  107   Excluded areas:  Military reservation
+# A 301  198   Excluded areas:  Water
+# A 301  199   Excluded areas:  Unsurveyed area
+# A 308 0000   Best estimate of classification and/or position
+# A 309 00--   Coincident feature or symbol
+# L 300  201   Approximate position (within 200 feet)
+# L 300  202   Protracted position
+# L 300  203   Arbitrary closure line
+# L 300  204   Base line
+# L 300  205   Claim line, grant line
+# L 300  300   Location monument
+# L 300  301   Isolated found section corner
+# L 300  302   Witness corner (off surveyed line)
+# L 308 0000   Best estimate of classification and/or position
+# L 309 00--   Coincident feature or symbol
+################################################################################
+################################################################################

Added: packages/drawmap/branches/upstream/current/big_buf_io.c
===================================================================
--- packages/drawmap/branches/upstream/current/big_buf_io.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/big_buf_io.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,194 @@
+/*
+ * =========================================================================
+ * big_buf_io - A library to allow efficient small reads and writes.
+ * Copyright (c) 1997,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ *
+ * Routines that let you do a lot of small reads and writes from a file,
+ * without a lot of OS penalty.  Simply call buf_read() and buf_write()
+ * instead of read() and write().  These routines read/write a file in
+ * large chunks and pass you the data in the small chunks that you ask
+ * for.  Note some caveats:
+ *
+ *	You must call buf_write(filedes, buf, 0) when you are done
+ *	writing so that it can flush the write buffer.
+ *	(Should maybe implement buf_flush() to take care of this.)
+ *
+ *	You can't reset the file pointer with lseek() or you will
+ *	mess up these routines.
+ *
+ *	You can only use these routines with one file at a time,
+ *	since there is only one buffer to hold the data.
+ *
+ *      If you only have one file to read, you can simply open it
+ *      and begin calling these routines.  If you have more than
+ *      one file to read (consecutively, of course), you should use
+ *      buf_open() to open the files, so that proper initialization
+ *      gets done.  buf_close() has been added for completeness, but
+ *      it doesn't do anything except call close().
+ *
+ * get_a_line() fills a buffer with information until it finds a newline,
+ * or runs out of space.
+ */
+
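+/*
+ * A minimal usage sketch (the file names are placeholders and error checking
+ * is omitted):  copy a text file line by line with get_a_line() and
+ * buf_write(), then flush the write buffer.
+ *
+ *	char line[1024];
+ *	ssize_t len;
+ *	int in_fd = buf_open("input.txt", O_RDONLY, 0);
+ *	int out_fd = open("output.txt", O_WRONLY | O_CREAT | O_TRUNC, 0644);
+ *
+ *	while ((len = get_a_line(in_fd, line, sizeof(line))) > 0)  {
+ *		buf_write(out_fd, line, len);
+ *	}
+ *	buf_write(out_fd, line, 0);
+ *	buf_close(in_fd);
+ *	close(out_fd);
+ *
+ * The zero-length buf_write() at the end flushes whatever is still sitting
+ * in the write buffer, as the notes above require.
+ */
+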
+#include <sys/types.h>
+#include <fcntl.h>
+#include <string.h>	/* for memcpy() */
+#include <unistd.h>	/* for read(), write(), close() */
+
+int buf_open(const char *, int, mode_t, ...);
+int buf_close(int);
+ssize_t buf_read(int, void *, size_t);
+ssize_t buf_write(int, const void *, size_t);
+ssize_t get_a_line(int, void *, size_t);
+
+#define BUF_SIZE  16384
+static int r_place = 0;
+static int r_size = 0;
+static int w_place = 0;
+
+
+
+
+int
+buf_open(const char *pathname, int flags, mode_t mode, ...)
+{
+	r_place = 0;
+	r_size = 0;
+	w_place = 0;
+
+	if (flags & O_CREAT)  {
+		return(open(pathname, flags, mode));
+	}
+	else  {
+		return(open(pathname, flags));
+	}
+}
+
+
+
+
+int
+buf_close(int fdesc)
+{
+	return(close(fdesc));
+}
+
+
+
+
+ssize_t
+buf_read(int filedes, void *buf, size_t nbyte)
+{
+	static char bigbuf[BUF_SIZE];
+	long amount;
+	long tmp_nbyte;
+	char *local_buf;
+
+	local_buf = (char *)buf;
+
+	tmp_nbyte = nbyte;
+
+	while (tmp_nbyte > 0)  {
+		if ((r_size <= 0) || (r_place == r_size))  {
+			r_size = read(filedes, bigbuf, BUF_SIZE);
+			if (r_size <= 0)  {
+				return(r_size);
+			}
+			r_place = 0;
+		}
+
+		amount = (r_size - r_place) >= tmp_nbyte ? tmp_nbyte : r_size - r_place;
+		memcpy(local_buf, &bigbuf[r_place], amount);
+		local_buf = local_buf + amount;
+		r_place = r_place + amount;
+		tmp_nbyte = tmp_nbyte - amount;
+	}
+
+	return(nbyte);
+}
+
+
+
+ssize_t
+buf_write(int filedes, const void *buf, size_t nbyte)
+{
+	static char bigbuf[BUF_SIZE];
+	long amount;
+	long tmp_nbyte;
+	long ret_val;
+	char *local_buf;
+
+	local_buf = (char *)buf;
+
+	if (nbyte == 0)  {
+		ret_val = write(filedes, bigbuf, w_place);
+		if (ret_val < 0)  {
+			return(ret_val);
+		}
+		else  {
+			return(0);
+		}
+	}
+
+	tmp_nbyte = nbyte;
+
+	while (tmp_nbyte > 0)  {
+		amount = (BUF_SIZE - w_place) >= tmp_nbyte ? tmp_nbyte : BUF_SIZE - w_place;
+		memcpy(&bigbuf[w_place], local_buf, amount);
+		local_buf = local_buf + amount;
+		w_place = w_place + amount;
+		tmp_nbyte = tmp_nbyte - amount;
+
+		if (w_place == BUF_SIZE)  {
+			if (write(filedes, bigbuf, BUF_SIZE) != BUF_SIZE)  {
+				return(-1);
+			}
+			w_place = 0;
+		}
+	}
+
+	return(nbyte);
+}
+
+
+
+
+
+ssize_t
+get_a_line(int filedes, void *buf, size_t nbyte)
+{
+	long i = 0;
+	ssize_t ret_val;
+
+	while (i < nbyte)  {
+		ret_val = buf_read(filedes, (char *)buf + i, 1);
+		if (ret_val < 0)  {
+			return(ret_val);
+		}
+		else if (ret_val == 0)  {
+			return((ssize_t)i);
+		}
+
+		if (*((unsigned char *)buf + i) == '\n')  {
+			return(i + 1);
+		}
+
+		i++;
+	}
+
+	return((ssize_t)nbyte);
+}

Added: packages/drawmap/branches/upstream/current/big_buf_io_z.c
===================================================================
--- packages/drawmap/branches/upstream/current/big_buf_io_z.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/big_buf_io_z.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,150 @@
+/*
+ * =========================================================================
+ * big_buf_io_z - A library to allow efficient small reads and writes from gzipped files.
+ * Copyright (c) 1997,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ *
+ * Routines that let you do a lot of small reads from a gzip-compressed file,
+ * without a lot of OS penalty.  Simply call buf_read_z() instead of read().
+ * These routines read a file in large chunks and pass you the data in the
+ * small chunks that you ask for.  Note some caveats:
+ *
+ *	You can't reset the file pointer with lseek() or you will
+ *	mess up these routines.
+ *
+ *	You can only use these routines with one file at a time,
+ *	since there is only one buffer to hold the data.
+ *
+ *      If you only have one file to read, you can simply open it
+ *      and begin calling these routines.  If you have more than
+ *      one file to read (consecutively, of course), you should use
+ *      buf_open_z() to open the files, so that proper initialization
+ *      gets done.  buf_close_z() has been added for completeness, but
+ *      it doesn't do anything except call close().
+ *
+ * get_a_line_z() fills a buffer with information until it finds a newline,
+ * or runs out of space.
+ *
+ * These routines depend on the zread() function, in the file gunzip.c
+ */
+
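+/*
+ * A minimal usage sketch (the file name is a placeholder, error checking and
+ * the <stdio.h> include are omitted):  read a gzip-compressed file line by
+ * line.  The program must also be linked with gunzip.c so that zread() is
+ * available.
+ *
+ *	char line[1024];
+ *	ssize_t len;
+ *	int fd = buf_open_z("hydrography.dlg.gz", O_RDONLY, 0);
+ *
+ *	while ((len = get_a_line_z(fd, line, sizeof(line) - 1)) > 0)  {
+ *		line[len] = '\0';
+ *		fputs(line, stdout);
+ *	}
+ *	buf_close_z(fd);
+ *
+ * Since get_a_line_z() counts the trailing newline in its return value, the
+ * '\0' lands just past it and fputs() reproduces each line unchanged.
+ */
+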
+#include <sys/types.h>
+#include <fcntl.h>
+#include <string.h>	/* for memcpy() */
+#include <unistd.h>	/* for close() */
+#include "gzip.h"
+
+int buf_open_z(const char *, int, mode_t, ...);
+int buf_close_z(int);
+ssize_t buf_read_z(int, void *, size_t);
+ssize_t get_a_line_z(int, void *, size_t);
+
+#define BUF_SIZE  WSIZE		/* This MUST be at least as large as WSIZE, or zread() won't work properly */
+static int r_place = 0;
+static int r_size = 0;
+static int w_place = 0;
+static int new_flag = 1;
+
+
+
+
+int
+buf_open_z(const char *pathname, int flags, mode_t mode, ...)
+{
+	r_place = 0;
+	r_size = 0;
+	w_place = 0;
+	new_flag = 1;
+
+	if (flags & O_CREAT)  {
+		return(open(pathname, flags, mode));
+	}
+	else  {
+		return(open(pathname, flags));
+	}
+}
+
+
+
+
+int
+buf_close_z(int fdesc)
+{
+	return(close(fdesc));
+}
+
+
+
+
+ssize_t
+buf_read_z(int filedes, void *buf, size_t nbyte)
+{
+	static char bigbuf[BUF_SIZE];
+	long amount;
+	long tmp_nbyte;
+	char *local_buf;
+
+	local_buf = (char *)buf;
+
+	tmp_nbyte = nbyte;
+
+	while (tmp_nbyte > 0)  {
+		if ((r_size <= 0) || (r_place == r_size))  {
+			r_size = zread(filedes, bigbuf, BUF_SIZE, new_flag);
+			if (r_size <= 0)  {
+				return(r_size);
+			}
+			r_place = 0;
+			new_flag = 0;
+		}
+
+		amount = (r_size - r_place) >= tmp_nbyte ? tmp_nbyte : r_size - r_place;
+		memcpy(local_buf, &bigbuf[r_place], amount);
+		local_buf = local_buf + amount;
+		r_place = r_place + amount;
+		tmp_nbyte = tmp_nbyte - amount;
+	}
+
+	return(nbyte);
+}
+
+
+
+
+ssize_t
+get_a_line_z(int filedes, void *buf, size_t nbyte)
+{
+	long i = 0;
+	ssize_t ret_val;
+
+	while (i < nbyte)  {
+		ret_val = buf_read_z(filedes, (char *)buf + i, 1);
+		if (ret_val < 0)  {
+			return(ret_val);
+		}
+		else if (ret_val == 0)  {
+			return((ssize_t)i);
+		}
+
+		if (*((unsigned char *)buf + i) == '\n')  {
+			return(i + 1);
+		}
+
+		i++;
+	}
+
+	return((ssize_t)nbyte);
+}

Added: packages/drawmap/branches/upstream/current/colors.h
===================================================================
--- packages/drawmap/branches/upstream/current/colors.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/colors.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,319 @@
+/*
+ * =========================================================================
+ * colors.h - A header file to define user-definable color parameters.
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+/*
+ * This file defines the color scheme for a drawmap-generated map.
+ * Some limited color information is included in drawmap.h, because it isn't
+ * intended to be changed and because most drawing functions need it.
+ * The remainder of the color information is in this file, so that it
+ * can be collected in one place.  The intent is that, if users want to
+ * experiment with different color schemes, they should be able to do so
+ * by modifying only this file, and the color map code in drawmap.c.
+ *
+ * We begin with a general discussion of the color scheme, and the
+ * philosophy behind it.
+ *
+ * The CIE diagram (not shown here, but readily available from reference sources)
+ * is a horseshoe-shaped chart that illustrates the range of colors that humans
+ * can perceive.  CMYK printers and computer monitors roughly (but not completely)
+ * coincide in the portion of the CIE diagram that they occupy.  They occupy triangular
+ * regions (with their respective primary colors at the corners) that encompass
+ * the center of the diagram, but don't extend to the edges of the chart.  Thus,
+ * neither a monitor nor a printer can generally display all human-viewable colors,
+ * but rather only the colors that fall within their respective triangular regions.
+ * The triangular region for a monitor or printer is called its "gamut".
+ *
+ * In general, a spiral on the CIE chromaticity diagram is considered
+ * a good way to encode sequential data.  The spiral begins with a relatively
+ * saturated version of a color and spirals inward to reach less-saturated, whiter
+ * colors.  Since relief maps generally start with a greenish color near sea
+ * level, the spiral should go something like:  green, yellow, orange, red,
+ * magenta, blue, cyan, lighter green, lighter yellow, lighter orange, lighter
+ * red, lighter magenta, lighter blue, lighter cyan, white.
+ *
+ * After trying the above color sequence, it appears prudent to eliminate blue
+ * from the rotation, because it makes it look like there are big lakes
+ * everywhere.
+ *
+ * Because the monitor and printer gamuts don't, in general, completely coincide,
+ * if we want to be able to display and print on a wide variety of hardware,
+ * fully-saturated colors are not a good idea for the outer portions of the spiral.
+ * (A fully-saturated color for a monitor might fall outside of the triangular
+ * region for a printer.)  We will use fully-saturated colors anyway, because they
+ * look good on a monitor.
+ * 
+ * For elevations below sea level, it seems reasonable to run
+ * the spiral backwards through cyan, blue, magenta, and black, as the
+ * elevations proceed from 0 to the trench bottoms.  For now, we aren't going
+ * to have any really deep depths, so we will just use cyan for any elevations
+ * below zero.
+ *
+ * Since most Americans prefer to deal in feet, we will use 1000-foot
+ * intervals, which are pretty close to 300-meter intervals.  Because the DEM
+ * data is in meters, and because I prefer metric units, drawmap internally
+ * uses metric values.
+ *
+ * If we just wanted to indicate bands of elevation, the color spiral described
+ * above would be enough.  However, we also want to give some indication of
+ * the variations in the terrain within each elevation band.  This is normally
+ * done with shaded relief, in which we shade the map as if it were illuminated
+ * with sunlight from above one of the corners.  Thus, for each color in the
+ * color spiral, we need to define a range of shades that will give some
+ * indication of light and shadow.  We do this by defining color bands.
+ * Each band represents a single color, with 16 different intensities.
+ *
+ * The default colors are defined as follows, where the [R,G,B] values
+ * represent the most intense shade in each color band:
+ *
+ *       Feet		Color		[R,G,B]			Meters
+ *   --------------	----------	-------------		----------------------
+ *         Below 0	Cyan		[ 60,255,255]		(center of Earth to 0)
+ *       0 to 1000	Green		[ 60,255, 60]		(   0 to  305)
+ *    1000 to 2000	Yellow		[255,255, 60]		( 305 to  610)
+ *    2000 to 3000	Orange		[255,165, 60]		( 610 to  914)
+ *    3000 to 4000	Red		[255, 60, 60]		( 914 to 1219)
+ *    4000 to 5000	Magenta		[255, 60,255]		(1219 to 1524)
+ *    5000 to 6000	Cyan		[ 60,255,255]		(1524 to 1829)
+ *    6000 to 7000	Light Green	[165,255,165]		(1829 to 2134)
+ *    7000 to 8000	Light Yellow	[255,255,165]		(2134 to 2438)
+ *    8000 to 9000 	Light Orange	[255,210,165]		(2438 to 2743)
+ *    9000 to 10000	Light Red	[255,165,165]		(2743 to 3048)
+ *   10000 to 11000	Light Magenta	[255,165,255]		(3048 to 3353)
+ *   11000 to 12000	Light Blue	[165,165,255]		(3353 to 3658)
+ *   12000 to 13000	Light Cyan	[165,255,255]		(3658 to 3962)
+ *     Above 13000	White		[255,255,255]		(3962 to infinity)
+ *
+ * Also provided, below, is a more natural-looking color map.  The user can
+ * select it via a command-line option.
+ *
+ * Thus ends the discussion of philosophy.  We now go on to define the specifics
+ * of how the color table is encoded.
+ */
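+
+/*
+ * A small worked note, added for illustration:  the metric band boundaries
+ * used in the tables below are simply the 1000-foot marks rounded to whole
+ * meters (1 foot = 0.3048 m), e.g.
+ *
+ *	meters = (feet * 3048 + 5000) / 10000;		// 1000 -> 305, 2000 -> 610, ... 13000 -> 3962
+ */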
+
+
+/*
+ * This is the master color table for maps generated by drawmap.
+ *
+ * It is designed so that you can easily change the color scheme if you want to.
+ *
+ * The SUN rasterfile color map provides 256 RGB colors.  We divide them
+ * into 16 color bands.  This table defines how the SUN color map is filled
+ * with colors.
+ *
+ * Except for band 14, which is currently unused, and band 15, which is
+ * reserved for some special colors, the bands each represent a range of
+ * elevations.  Within each band, there is a range of intensities for a given
+ * color, with each intensity representing a different steepness of terrain
+ * within that range of elevations.
+ *
+ * The first entry in each row of the table is an index into the SUN color map.
+ * Currently, these are simply spaced by 16 colors.  (They are defined in
+ * drawmap.h.)  Don't mess with them unless you really understand how the
+ * drawmap code works.
+ *
+ * The second entry in each row is a maximum elevation for that
+ * color band (in meters).  For example, color band 6 is used for coloring
+ * terrain that has elevations greater than the maximum elevation for color
+ * band 5, but less than or equal to the maximum elevation for color band 6.
+ * By default, color band 6 is Light Green.  The red, green, and blue values
+ * for color band 6 are set so that the first color in that color band is
+ * Light Green.  The remaining 15 colors in that color band are derived from
+ * the initial entry by simply dimming them more and more until the last color
+ * in the band is black.
+ *
+ * All of color bands 0 through 13 work in the same way.  Color band
+ * 14 is totally unused, and is available for experimentation.  Color
+ * band 15 is really not a band of colors at all, but rather just a collection
+ * of individually-defined colors for special uses.  The colors in that
+ * band are bright primary colors and the like, intended for applications such as
+ * coloring streams bright blue and roads bright red.  Changing any of
+ * the numbers in color_tab for color band 15 is a waste of time, because
+ * they aren't currently used.  The color band 15 entry is included for
+ * completeness, and in case we some day want to truly use all 16 color bands
+ * for elevation shading.  The colors for band 15 are actually defined by the
+ * "brights" array, further on in this file.
+ *
+ * Elevations 0 and HIGHEST_ELEVATION get some special handling.  In
+ * particular, since elevations below 0 are rare, we simply re-use the CYAN
+ * from band 5 when we color a sub-sea-level point.  HIGHEST_ELEVATION is
+ * in the mid-stratosphere to ensure that there are no land features higher
+ * than it is.  We use HIGHEST_ELEVATION as a special flag, to set regions
+ * of the map to white by giving them an elevation of exactly HIGHEST_ELEVATION.
+ * Thus, any regions of the map for which the user does not provide DEM data,
+ * are initialized to HIGHEST_ELEVATION and end up as simple white background.
+ *
+ * By default, the elevations are selected so that each color band
+ * represents a 1000-foot range of elevations.  You can change the
+ * elevations as you wish, as long as you follow these rules:
+ *
+ *   (1) The elevation for color band 0 should always be greater than zero.
+ *   (2) The elevation for the last valid color band (normally band 13,
+ *       but could be band 14 if you choose to use it) must be set to
+ *       HIGHEST_ELEVATION.  (If you do choose to use band 14, you will
+ *       need to make some simple changes in the drawmap code.)
+ *   (3) The elevations must be arranged in ascending order.
+ *   (4) Remember that elevations below 0 re-use the CYAN color band
+ *       from band 5.  See to it that C_INDEX_SEA_? (defined below) is set to the
+ *       same value as the C_INDEX_? value for a CYAN color band.  (Or choose
+ *       some other color band appropriate for sub-sea-level terrain.)
+ *
+ * You can also change the default colors, by changing their red, green, and blue
+ * components.  The default color scheme is not very "natural" since it is based on
+ * the CIE spiral described above.
+ *
+ * There are actually four color tables defined:  a very garish one based
+ * on the spiral; a more natural one that includes the colors typically
+ * associated with the vegetation and rocks at various altitudes; a neutral
+ * one that is intended to look natural without being very obtrusive; and
+ * one that is reminiscent of maps in school textbooks and falls roughly
+ * midway between the spiral table and the natural table.
+ *
+ * The -n option selects between the tables.
+ *
+ * Of course, "naturalness" is highly subjective, and your preferred colors may be different.
+ */
+struct color_tab  {
+	unsigned char	c_index;	// index into the color map for the start of this color band [0-255]
+	long		max_elevation;	// maximum elevation for this color band (in meters)
+	unsigned char	red;		// red component of the color for this color band [0 - 255]
+	unsigned char	green;		// green component of the color for this color band [0 - 255]
+	unsigned char	blue;		// blue component of the color for this color band [0 - 255]
+};
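+
+/*
+ * Illustrative sketch only (the actual fill is done by the color map code
+ * in drawmap.c):  one way to load the 16 slots of a single color band,
+ * starting from the band's [R,G,B] entry and dimming linearly to black.
+ * This assumes the color map is held as three 256-entry arrays, one per
+ * primary, as in a SUN rasterfile color map.
+ */
+static void
+fill_band_example(unsigned char *red_map, unsigned char *green_map,
+		  unsigned char *blue_map, struct color_tab *band)
+{
+	int shade;
+
+	for (shade = 0; shade < 16; shade++)  {
+		/* Shade 0 is the full-intensity color; shade 15 is black. */
+		red_map[band->c_index + shade]   = (unsigned char)((band->red   * (15 - shade)) / 15);
+		green_map[band->c_index + shade] = (unsigned char)((band->green * (15 - shade)) / 15);
+		blue_map[band->c_index + shade]  = (unsigned char)((band->blue  * (15 - shade)) / 15);
+	}
+}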
+
+struct color_tab color_tab_neutral[16] =  {
+	C_INDEX_0,	              305,	190,	255,	175,	// LIGHT GREEN
+	C_INDEX_1,	              610,	190,	244,	174,	// interpolated
+	C_INDEX_2,	              914,	190,	233,	174,	// interpolated
+	C_INDEX_3,	             1219,	190,	222,	173,	// interpolated
+	C_INDEX_4,	             1524,	190,	211,	172,	// interpolated
+	C_INDEX_5,	             1829,	190,	200,	170,	// OLIVE GREEN
+	C_INDEX_6,	             2134,	185,	187,	165,	// interpolated
+	C_INDEX_7,	             2438,	180,	174,	160,	// interpolated
+	C_INDEX_8,	             2743,	175,	162,	155,	// interpolated
+	C_INDEX_9,	             3048,	170,	150,	150,	// BROWN
+	C_INDEX_10,	             3353,	160,	160,	160,	// interpolated
+	C_INDEX_11,	             3658,	190,	190,	190,	// interpolated
+	C_INDEX_12,	             3962,	220,	220,	220,	// interpolated
+	C_INDEX_13,	HIGHEST_ELEVATION,	255,	255,	255,	// WHITE
+	C_INDEX_14,	HIGHEST_ELEVATION /* stratosphere */,	  0,	  0,	  0,	// currently unused
+	C_INDEX_15,	               -1 /*              */,	  0,	  0,	  0,	// This line is filler --- the entries here are unused
+};
+#define C_INDEX_SEA_NEUTRAL	C_INDEX_10
+
+struct color_tab color_tab_natural[16] =  {
+	C_INDEX_0,	              305,	190,	255,	175,	// LIGHT GREEN
+	C_INDEX_1,	              610,	195,	237,	165,	// GRAYISH GREEN
+	C_INDEX_2,	              914,	200,	210,	156,	// GREENISH ORANGE
+	C_INDEX_3,	             1219,	205,	194,	147,	// OLIVE
+	C_INDEX_4,	             1524,	210,	178,	138,	// OLIVE ORANGE
+	C_INDEX_5,	             1829,	215,	162,	129,	// LIGHT ORANGE
+	C_INDEX_6,	             2134,	220,	145,	120,	// RED ORANGE
+	C_INDEX_7,	             2438,	207,	150,	135,	// RUST
+	C_INDEX_8,	             2743,	193,	155,	150,	// DULL BROWN
+	C_INDEX_9,	             3048,	179,	160,	165,	// LIGHT MAGENTA
+	C_INDEX_10,	             3353,	165,	165,	180,	// GRAY
+	C_INDEX_11,	             3658,	210,	210,	255,	// LIGHT CYAN
+	C_INDEX_12,	             3962,	232,	232,	255,	// LIGHTER CYAN
+	C_INDEX_13,	HIGHEST_ELEVATION,	255,	255,	255,	// WHITE
+	C_INDEX_14,	HIGHEST_ELEVATION /* stratosphere */,	  0,	  0,	  0,	// currently unused
+	C_INDEX_15,	               -1 /*              */,	  0,	  0,	  0,	// This line is filler --- the entries here are unused
+};
+#define C_INDEX_SEA_NATURAL	C_INDEX_10
+
+struct color_tab color_tab_textbook[16] =  {
+	C_INDEX_0,	              305,	 60,	255,	 60,	// GREEN
+	C_INDEX_1,	              610,	150,	255,	 30,	// YELLOWISH GREEN
+	C_INDEX_2,	              914,	200,	255,	  0,	// GREENISH YELLOW
+	C_INDEX_3,	             1219,	218,	225,	 20,	// OLIVE
+	C_INDEX_4,	             1524,	237,	205,	 40,	// LIGHT ORANGE
+	C_INDEX_5,	             1829,	255,	185,	 60,	// ORANGE
+	C_INDEX_6,	             2134,	255,	145,	 95,	// RUST
+	C_INDEX_7,	             2438,	255,	125,	115,	// RED ORANGE
+	C_INDEX_8,	             2743,	225,	138,	150,	// LIGHT MAGENTA
+	C_INDEX_9,	             3048,	195,	152,	150,	// BROWNISH MAGENTA
+	C_INDEX_10,	             3353,	165,	165,	180,	// GRAY
+	C_INDEX_11,	             3658,	210,	210,	255,	// LIGHT CYAN
+	C_INDEX_12,	             3962,	232,	232,	255,	// LIGHTER CYAN
+	C_INDEX_13,	HIGHEST_ELEVATION,	255,	255,	255,	// WHITE
+	C_INDEX_14,	HIGHEST_ELEVATION /* stratosphere */,	  0,	  0,	  0,	// currently unused
+	C_INDEX_15,	               -1 /*              */,	  0,	  0,	  0,	// This line is filler --- the entries here are unused
+};
+#define C_INDEX_SEA_TEXTBOOK	C_INDEX_10
+
+struct color_tab color_tab_spiral[16] =  {
+	C_INDEX_0,	              305 /*    1000 feet */,	 60,	255,	 60,	// GREEN
+	C_INDEX_1,	              610 /*    2000 feet */,	255,	255,	 60,	// YELLOW
+	C_INDEX_2,	              914 /*    3000 feet */,	255,	165,	 60,	// ORANGE
+	C_INDEX_3,	             1219 /*    4000 feet */,	255,	 60,	 60,	// RED
+	C_INDEX_4,	             1524 /*    5000 feet */,	255,	 60,	255,	// MAGENTA
+	C_INDEX_5,	             1829 /*    6000 feet */,	 60,	255,	255,	// CYAN
+	C_INDEX_6,	             2134 /*    7000 feet */,	165,	255,	165,	// LIGHT GREEN
+	C_INDEX_7,	             2438 /*    8000 feet */,	255,	255,	165,	// LIGHT YELLOW
+	C_INDEX_8,	             2743 /*    9000 feet */,	255,	210,	165,	// LIGHT ORANGE
+	C_INDEX_9,	             3048 /*   10000 feet */,	255,	165,	165,	// LIGHT RED
+	C_INDEX_10,	             3353 /*   11000 feet */,	255,	165,	255,	// LIGHT MAGENTA
+	C_INDEX_11,	             3658 /*   12000 feet */,	165,	165,	255,	// LIGHT BLUE
+	C_INDEX_12,	             3962 /*   13000 feet */,	165,	255,	255,	// LIGHT CYAN
+	C_INDEX_13,	HIGHEST_ELEVATION /* stratosphere */,	255,	255,	255,	// WHITE
+	C_INDEX_14,	HIGHEST_ELEVATION /* stratosphere */,	  0,	  0,	  0,	// currently unused
+	C_INDEX_15,	               -1 /*              */,	  0,	  0,	  0,	// This line is filler --- the entries here are unused
+};
+#define C_INDEX_SEA_SPIRAL	C_INDEX_11
+
+/*
+ * Set the following parameter to the total number of color bands you have defined.
+ * It is a good idea if all tables have the same number of valid bands,
+ * but not strictly necessary if you are careful and know what you are
+ * doing.  Note, in particular, the places in the code where MAX_VALID_BANDS is used.
+ */
+#define MAX_VALID_BANDS 14
+
+#define NUM_COLOR_TABS 4	// Set this to the total number of color tables you have defined.
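+
+/*
+ * Illustrative sketch only (not the lookup code that drawmap actually
+ * uses):  mapping an elevation to the first color map slot of its band,
+ * following the rules above.  "tab" is one of the four tables and
+ * "c_index_sea" is the corresponding C_INDEX_SEA_* value.
+ */
+static unsigned char
+find_band_index_example(struct color_tab *tab, unsigned char c_index_sea, long elevation)
+{
+	int band;
+
+	if (elevation < 0)  {
+		/* Sub-sea-level points re-use the band selected by C_INDEX_SEA_*. */
+		return c_index_sea;
+	}
+	for (band = 0; band < MAX_VALID_BANDS; band++)  {
+		/* Each band covers elevations up to and including its max_elevation. */
+		if (elevation <= tab[band].max_elevation)  {
+			return tab[band].c_index;
+		}
+	}
+	/* Not reached as long as the last valid band's max_elevation is HIGHEST_ELEVATION. */
+	return tab[MAX_VALID_BANDS - 1].c_index;
+}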
+
+
+
+/*
+ * These are the SUN rasterfile color map values for various individual
+ * colors.  These colors occupy the slots in color band 15, as described above.
+ */
+struct brights  {
+	unsigned char	c_index;	// index into the color map for this color [C_INDEX_15 - 255]
+	unsigned char	red;		// red component of the color for this color band [0 - 255]
+	unsigned char	green;		// green component of the color for this color band [0 - 255]
+	unsigned char	blue;		// blue component of the color for this color band [0 - 255]
+} brights[16] =  {
+	C_INDEX_15,		210,	 15,	 30,	// BRIGHT RED
+	C_INDEX_15 + 1,		 50,	210,	 50,	// BRIGHT GREEN
+	C_INDEX_15 + 2,		 40,	 60,	190,	// BRIGHT BLUE
+	C_INDEX_15 + 3,		  0,	  0,	  0,	// BLACK
+	C_INDEX_15 + 4,		120,	120,	120,	// GRAY
+	C_INDEX_15 + 5,		255,	210,	165,	// LIGHT ORANGE
+	C_INDEX_15 + 6,		255,	255,	255,	// WHITE
+	C_INDEX_15 + 7,		  0,	  0,	  0,	// unused
+	C_INDEX_15 + 8,		  0,	  0,	  0,	// unused
+	C_INDEX_15 + 9,		  0,	  0,	  0,	// unused
+	C_INDEX_15 + 10,	  0,	  0,	  0,	// unused
+	C_INDEX_15 + 11,	  0,	  0,	  0,	// unused
+	C_INDEX_15 + 12,	  0,	  0,	  0,	// unused
+	C_INDEX_15 + 13,	  0,	  0,	  0,	// unused
+	C_INDEX_15 + 14,	  0,	  0,	  0,	// unused
+	C_INDEX_15 + 15,	  0,	  0,	  0,	// unused
+};
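+
+/*
+ * Illustrative sketch only:  with a one-byte-per-pixel indexed image buffer
+ * (as in the SUN rasterfiles that drawmap writes), overlaying a feature in
+ * one of the "bright" colors just means storing that color's index at the
+ * pixel.  The buffer layout assumed here is hypothetical.
+ */
+static void
+plot_bright_example(unsigned char *image, long x_size, long x, long y, int bright)
+{
+	image[y * x_size + x] = brights[bright].c_index;	/* e.g. bright == 2 for BRIGHT BLUE streams */
+}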

Added: packages/drawmap/branches/upstream/current/dem.c
===================================================================
--- packages/drawmap/branches/upstream/current/dem.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/dem.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,1098 @@
+/*
+ * =========================================================================
+ * dem.c - Routines to handle DEM data.
+ * Copyright (c) 2000,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <math.h>
+#include "drawmap.h"
+#include "dem.h"
+
+
+
+/*
+ * This routine parses relevant data from a DEM file type A record
+ * and inserts the converted data into the given storage structure.
+ */
+void
+parse_dem_a(unsigned char *buf, struct dem_record_type_a *dem_a, struct datum *dem_datum)
+{
+	long i;
+	unsigned char save_byte;
+
+	/*
+	 * Parse all of the data from the header that we care about.
+	 * For now, don't waste time parsing things that aren't
+	 * currently interesting.
+	 * Since it is possible for numbers to butt together at field
+	 * edges, we do the little save_byte thing to ensure that we
+	 * don't convert two at a time.
+	 *
+	 * There are a lot of comments in dem.h describing the various
+	 * header fields, so this block of code is presented largely
+	 * sans comments.
+	 */
+	strncpy(dem_a->title, buf, 80);
+	save_byte = buf[150]; buf[150] = '\0'; dem_a->level_code =  strtol(&buf[144], (char **)0, 10); buf[150] = save_byte;
+	save_byte = buf[162]; buf[162] = '\0'; dem_a->plane_ref =   strtol(&buf[156], (char **)0, 10); buf[162] = save_byte;
+	save_byte = buf[168]; buf[168] = '\0'; dem_a->zone =        strtol(&buf[162], (char **)0, 10); buf[168] = save_byte;
+	save_byte = buf[534]; buf[534] = '\0'; dem_a->plane_units = strtol(&buf[528], (char **)0, 10); buf[534] = save_byte;
+	save_byte = buf[540]; buf[540] = '\0'; dem_a->elev_units =  strtol(&buf[534], (char **)0, 10); buf[540] = save_byte;
+	for (i = 546; i < 786; i++)  {
+		/* The DEM files use both 'D' and 'E' for exponentiation.  strtod() expects 'E' or 'e'. */
+		if (buf[i] == 'D') buf[i] = 'E';
+	}
+	save_byte = buf[570]; buf[570] = '\0'; dem_a->sw_x_gp = strtod(&buf[546], (char **)0); buf[570] = save_byte;
+	save_byte = buf[594]; buf[594] = '\0'; dem_a->sw_y_gp = strtod(&buf[570], (char **)0); buf[594] = save_byte;
+	save_byte = buf[618]; buf[618] = '\0'; dem_a->nw_x_gp = strtod(&buf[594], (char **)0); buf[618] = save_byte;
+	save_byte = buf[642]; buf[642] = '\0'; dem_a->nw_y_gp = strtod(&buf[618], (char **)0); buf[642] = save_byte;
+	save_byte = buf[666]; buf[666] = '\0'; dem_a->ne_x_gp = strtod(&buf[642], (char **)0); buf[666] = save_byte;
+	save_byte = buf[690]; buf[690] = '\0'; dem_a->ne_y_gp = strtod(&buf[666], (char **)0); buf[690] = save_byte;
+	save_byte = buf[714]; buf[714] = '\0'; dem_a->se_x_gp = strtod(&buf[690], (char **)0); buf[714] = save_byte;
+	save_byte = buf[738]; buf[738] = '\0'; dem_a->se_y_gp = strtod(&buf[714], (char **)0); buf[738] = save_byte;
+	save_byte = buf[762]; buf[762] = '\0'; dem_a->min_elev = strtod(&buf[738], (char **)0); buf[762] = save_byte;
+	save_byte = buf[786]; buf[786] = '\0'; dem_a->max_elev = strtod(&buf[762], (char **)0); buf[786] = save_byte;
+	save_byte = buf[810]; buf[810] = '\0'; dem_a->angle = strtod(&buf[786], (char **)0); buf[810] = save_byte;
+	save_byte = buf[816]; buf[816] = '\0'; dem_a->accuracy = strtol(&buf[810], (char **)0, 10); buf[816] = save_byte;
+	for (i = 816; i < 852; i++)  {
+		/* The DEM files use both 'D' and 'E' for exponentiation.  strtod() expects 'E' or 'e'. */
+		if (buf[i] == 'D') buf[i] = 'E';
+	}
+	save_byte = buf[828]; buf[828] = '\0'; dem_a->x_res = round(strtod(&buf[816], (char **)0)); buf[828] = save_byte;
+	save_byte = buf[840]; buf[840] = '\0'; dem_a->y_res = round(strtod(&buf[828], (char **)0)); buf[840] = save_byte;
+	save_byte = buf[852]; buf[852] = '\0'; dem_a->z_res = round(strtod(&buf[840], (char **)0)); buf[852] = save_byte;
+	save_byte = buf[858]; buf[858] = '\0'; dem_a->rows = strtol(&buf[852], (char **)0, 10); buf[858] = save_byte;
+	save_byte = buf[864]; buf[864] = '\0'; dem_a->cols = strtol(&buf[858], (char **)0, 10); buf[864] = save_byte;
+	/*
+	 * The following element is only present in the new Type A format.
+	 * We thus need to check for its presence rather than just doing the conversion.
+	 */
+	if (buf[891] == ' ')  {
+		/* There is no entry, so we probably have an old-style record. */
+		dem_a->horizontal_datum = -1;
+	}
+	else  {
+		/* There is an entry, so we must have a new-style record. */
+		save_byte = buf[892]; buf[892] = '\0'; dem_a->horizontal_datum = strtol(&buf[890], (char **)0, 10); buf[892] = save_byte;
+	}
+	if ((dem_a->horizontal_datum == -1) || (dem_a->horizontal_datum == 1))  {
+		/* The datum is NAD-27.  Initialize the parameters. */
+		dem_datum->a = NAD27_SEMIMAJOR; dem_datum->b = NAD27_SEMIMINOR;
+		dem_datum->e_2 = NAD27_E_SQUARED;
+		dem_datum->f_inv = NAD27_F_INV;
+		dem_datum->k0 = UTM_K0;
+		dem_datum->a0 = NAD27_A0;
+		dem_datum->a2 = NAD27_A2;
+		dem_datum->a4 = NAD27_A4;
+		dem_datum->a6 = NAD27_A6;
+	}
+	else if (dem_a->horizontal_datum == 3)  {
+		/* The datum is WGS-84.  Initialize the parameters. */
+		dem_datum->a = WGS84_SEMIMAJOR;
+		dem_datum->b = WGS84_SEMIMINOR;
+		dem_datum->e_2 = WGS84_E_SQUARED;
+		dem_datum->f_inv = WGS84_F_INV;
+		dem_datum->k0 = UTM_K0;
+		dem_datum->a0 = WGS84_A0;
+		dem_datum->a2 = WGS84_A2;
+		dem_datum->a4 = WGS84_A4;
+		dem_datum->a6 = WGS84_A6;
+	}
+	else if (dem_a->horizontal_datum == 4)  {
+		/* The datum is NAD-83.  Initialize the parameters. */
+		dem_datum->a = NAD83_SEMIMAJOR;
+		dem_datum->b = NAD83_SEMIMINOR;
+		dem_datum->e_2 = NAD83_E_SQUARED;
+		dem_datum->f_inv = NAD83_F_INV;
+		dem_datum->k0 = UTM_K0;
+		dem_datum->a0 = NAD83_A0;
+		dem_datum->a2 = NAD83_A2;
+		dem_datum->a4 = NAD83_A4;
+		dem_datum->a6 = NAD83_A6;
+	}
+	else  {
+		/* We don't handle any other datums yet.  Default to NAD-27. */
+		dem_datum->a = NAD27_SEMIMAJOR;
+		dem_datum->b = NAD27_SEMIMINOR;
+		dem_datum->e_2 = NAD27_E_SQUARED;
+		dem_datum->f_inv = NAD27_F_INV;
+		dem_datum->k0 = UTM_K0;
+		dem_datum->a0 = NAD27_A0;
+		dem_datum->a2 = NAD27_A2;
+		dem_datum->a4 = NAD27_A4;
+		dem_datum->a6 = NAD27_A6;
+
+		fprintf(stderr, "Warning:  The DEM data aren't in a horizontal datum that drawmap\n");
+		fprintf(stderr, "knows about.  Defaulting to NAD-27.  This may result in\npositional errors in the map.\n");
+	}
+
+	/*
+	 * A few fields are only used for SDTS.  Initialize them
+	 * to zero.
+	 */
+	dem_a->x_gp_first = 0.0;
+	dem_a->y_gp_first = 0.0;
+	dem_a->void_fill = 0;
+	dem_a->edge_fill = 0;
+
+	return;
+}
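+
+
+/*
+ * Illustrative helper (not part of the original sources):  the save_byte
+ * idiom used in parse_dem_a() above, wrapped up so that one fixed-width
+ * integer field can be pulled out of the buffer without permanently
+ * modifying it.
+ */
+static long
+parse_field_long_example(unsigned char *buf, long start, long end)
+{
+	unsigned char save_byte;
+	long value;
+
+	save_byte = buf[end];
+	buf[end] = '\0';
+	value = strtol((char *)&buf[start], (char **)0, 10);
+	buf[end] = save_byte;
+
+	return value;
+}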
+
+
+
+/*
+ * Print out relevant fields from a DEM type A record, for debugging purposes.
+ * This version prints out only a limited selection of the fields in a Type A header.
+ */
+void
+print_dem_a(struct dem_record_type_a *dem_a)
+{
+	fprintf(stderr, "DEM Type A Record:\n");
+	fprintf(stderr, "  Title       = %.80s\n", dem_a->title);
+	fprintf(stderr, "  level_code  = %d\n", dem_a->level_code);
+	fprintf(stderr, "  plane_ref   = %d\n", dem_a->plane_ref);
+	fprintf(stderr, "  zone        = %d\n", dem_a->zone);
+	fprintf(stderr, "  plane_units = %d\n", dem_a->plane_units);
+	fprintf(stderr, "  elev_units  = %d\n", dem_a->elev_units);
+	fprintf(stderr, "  sw_x_gp     = %g\n", dem_a->sw_x_gp);
+	fprintf(stderr, "  sw_y_gp     = %g\n", dem_a->sw_y_gp);
+	fprintf(stderr, "  nw_x_gp     = %g\n", dem_a->nw_x_gp);
+	fprintf(stderr, "  nw_y_gp     = %g\n", dem_a->nw_y_gp);
+	fprintf(stderr, "  ne_x_gp     = %g\n", dem_a->ne_x_gp);
+	fprintf(stderr, "  ne_y_gp     = %g\n", dem_a->ne_y_gp);
+	fprintf(stderr, "  se_x_gp     = %g\n", dem_a->se_x_gp);
+	fprintf(stderr, "  se_y_gp     = %g\n", dem_a->se_y_gp);
+	fprintf(stderr, "  min_elev    = %d\n", dem_a->min_elev);
+	fprintf(stderr, "  max_elev    = %d\n", dem_a->max_elev);
+	fprintf(stderr, "  angle       = %g\n", dem_a->angle);
+	fprintf(stderr, "  accuracy    = %d\n", dem_a->accuracy);
+	fprintf(stderr, "  x_res       = %g\n", dem_a->x_res);
+	fprintf(stderr, "  y_res       = %g\n", dem_a->y_res);
+	fprintf(stderr, "  z_res       = %g\n", dem_a->z_res);
+	fprintf(stderr, "  cols        = %d\n", dem_a->cols);
+	fprintf(stderr, "  rows        = %d\n", dem_a->rows);
+	fprintf(stderr, "  horiz_datum = %d\n", dem_a->horizontal_datum);
+	fprintf(stderr, "  x_gp_first  = %g\n", dem_a->x_gp_first);
+	fprintf(stderr, "  y_gp_first  = %g\n", dem_a->y_gp_first);
+	fprintf(stderr, "  void_fill   = %d\n", dem_a->void_fill);
+	fprintf(stderr, "  edge_fill   = %d\n", dem_a->edge_fill);
+
+	return;
+}
+
+
+
+
+/*
+ * Print out all relevant fields from a DEM type A record and a DEM type C record.
+ * This version of the print routine prints out pretty much everything
+ * in both structures, except for some of the fields from the newer format.
+ */
+void
+print_dem_a_c(struct dem_record_type_a *dem_a, struct dem_record_type_c *dem_c)
+{
+	fprintf(stdout, "DEM Type A Record:\n");
+	fprintf(stdout, "  title                   = %.40s\n", dem_a->title);
+	fprintf(stdout, "  se_latitude             = %g\n", dem_a->se_lat);
+	fprintf(stdout, "  se_longitude            = %g\n", dem_a->se_long);
+	fprintf(stdout, "  process_code            = %d\n", dem_a->process_code);
+	fprintf(stdout, "  origin_code             = %4.4s\n", dem_a->origin_code);
+	fprintf(stdout, "  level_code              = %d\n", dem_a->level_code);
+	fprintf(stdout, "  elevation_pattern       = %d\n", dem_a->elevation_pattern);
+	fprintf(stdout, "  plane_ref               = %d\n", dem_a->plane_ref);
+	fprintf(stdout, "  zone                    = %d\n", dem_a->zone);
+	fprintf(stdout, "  plane_units             = %d\n", dem_a->plane_units);
+	fprintf(stdout, "  elev_units              = %d\n", dem_a->elev_units);
+	fprintf(stdout, "  sw_x_gp                 = %g\n", dem_a->sw_x_gp);
+	fprintf(stdout, "  sw_y_gp                 = %g\n", dem_a->sw_y_gp);
+	fprintf(stdout, "  nw_x_gp                 = %g\n", dem_a->nw_x_gp);
+	fprintf(stdout, "  nw_y_gp                 = %g\n", dem_a->nw_y_gp);
+	fprintf(stdout, "  ne_x_gp                 = %g\n", dem_a->ne_x_gp);
+	fprintf(stdout, "  ne_y_gp                 = %g\n", dem_a->ne_y_gp);
+	fprintf(stdout, "  se_x_gp                 = %g\n", dem_a->se_x_gp);
+	fprintf(stdout, "  se_y_gp                 = %g\n", dem_a->se_y_gp);
+	fprintf(stdout, "  min_elev                = %d\n", dem_a->min_elev);
+	fprintf(stdout, "  max_elev                = %d\n", dem_a->max_elev);
+	fprintf(stdout, "  angle                   = %g\n", 0.0);
+	fprintf(stdout, "  accuracy                = %d\n", dem_a->accuracy);
+	fprintf(stdout, "  x_res                   = %g\n", dem_a->x_res);
+	fprintf(stdout, "  y_res                   = %g\n", dem_a->y_res);
+	fprintf(stdout, "  z_res                   = %g\n", dem_a->z_res);
+	fprintf(stdout, "  cols                    = %d  (This value is set to 1 in the main header.)\n", dem_a->cols);
+	fprintf(stdout, "  rows                    = %d\n", dem_a->rows);
+	fprintf(stdout, "  vertical_datum          = %d\n", dem_a->vertical_datum);
+	fprintf(stdout, "  horizontal_datum        = %d\n", dem_a->horizontal_datum);
+	fprintf(stdout, "  vertical_datum_shift    = %g\n", dem_a->vertical_datum_shift);
+	fprintf(stdout, "Other useful information, not in DEM Type A Record:\n");
+	fprintf(stdout, "  UTM x, NW corner sample = %g\n", dem_a->x_gp_first);
+	fprintf(stdout, "  UTM y, NW corner sample = %g\n", dem_a->y_gp_first);
+	fprintf(stdout, "  edge_fill               = %d\n", dem_a->edge_fill);
+	fprintf(stdout, "  void_fill               = %d\n", dem_a->void_fill);
+	fprintf(stdout, "DEM Type C Record:\n");
+	fprintf(stdout, "  datum_stats_flag        = %d\n", dem_c->datum_stats_flag);
+	fprintf(stdout, "  datum_rmse_x            = %d\n", dem_c->datum_rmse_x);
+	fprintf(stdout, "  datum_rmse_y            = %d\n", dem_c->datum_rmse_y);
+	fprintf(stdout, "  datum_rmse_z            = %d\n", dem_c->datum_rmse_z);
+	fprintf(stdout, "  datum_sample_size       = %d\n", dem_c->datum_sample_size);
+	fprintf(stdout, "  dem_stats_flag          = %d\n", dem_c->dem_stats_flag);
+	fprintf(stdout, "  dem_rmse_x              = %d\n", dem_c->dem_rmse_x);
+	fprintf(stdout, "  dem_rmse_y              = %d\n", dem_c->dem_rmse_y);
+	fprintf(stdout, "  dem_rmse_z              = %d\n", dem_c->dem_rmse_z);
+	fprintf(stdout, "  dem_sample_size         = %d\n", dem_c->dem_sample_size);
+
+	return;
+}
+
+
+
+/*
+ * Process a DEM file that uses the Geographic Planimetric Reference System.
+ * These include 30-minute, 1-degree, and Alaska DEMs.  (The routine is so
+ * far untested with 30-minute DEMs, so they may not work.)
+ *
+ * Note:  All 250K DEM files represent 1-degree by 1-degree blocks
+ * (as far as I know), including Alaska DEMs.  (The Alaska DEMs
+ * have smaller numbers of longitude samples, but still cover
+ * 1-degree each.)
+ *
+ * Note that this function has a side effect:  it converts the
+ * latitude/longitude code in dem_a->title into blanks.  This is done
+ * so that the latitude/longitude code won't be included as part of
+ * the DEM name when we capture the DEM name a few lines hence.
+ * The routine has the additional side effect of setting
+ * dem_a->zone to a valid value.  The zone field in the header
+ * is zero for Geographic DEMs.
+ *
+ * This function returns 0 if it allocates memory and reads in the data.
+ * It returns 1 if it doesn't allocate memory.
+ */
+int
+process_geo_dem(int dem_fdesc, ssize_t (*read_function)(), struct image_corners *image_corners,
+		struct dem_corners *dem_corners, struct dem_record_type_a *dem_a, struct datum *dem_datum)
+{
+	long i, j;
+	double f, g;
+	char e_w_code;
+	char latitude_code;
+	long location_code;
+	unsigned char ll_code[8];
+	unsigned char *ptr;
+	unsigned char buf[8 * DEM_RECORD_LENGTH];
+	ssize_t ret_val;
+	int interp_size;
+	long dem_size_x, dem_size_y;
+
+
+	/*
+	 * Apparently, when the USGS digitized the 250K DEM data, they didn't decide
+	 * on a consistent format for the file headers.  Some files have the
+	 * latitude/longitude code (described in a later comment) at byte 49
+	 * in the header, some files have it right at the beginning of the header,
+	 * and others may have it somewhere else (although I have no examples of
+	 * such at this time).
+	 *
+	 * The code seems always to appear somewhere within the first 144 bytes,
+	 * so we will just search the whole thing for something that looks like the
+	 * code.  This is a pain, but it should be reliable.
+	 *
+	 * We go all the way to the end of the field, even after finding a code,
+	 * because we want to null it out and it might appear twice.
+	 */
+	for (i = 0; i < 137; i++)  {
+		if ((dem_a->title[i] != 'N') && (dem_a->title[i] != 'S'))  {
+			continue;
+		}
+
+		if ((dem_a->title[i + 1] < 'A') || (dem_a->title[i + 1] > 'Z') ||
+		    (dem_a->title[i + 2] < '0') || (dem_a->title[i + 2] > '9') ||
+		    (dem_a->title[i + 3] < '0') || (dem_a->title[i + 3] > '9') ||
+		    (dem_a->title[i + 4] != '-')  ||
+		    (dem_a->title[i + 5] < '0') || (dem_a->title[i + 5] > '9') ||
+		    (dem_a->title[i + 6] < '0') || (dem_a->title[i + 6] > '9') ||
+		    ((dem_a->title[i + 7] != 'E') && (dem_a->title[i + 7] != 'W')))  {
+			continue;
+		}
+
+		strncpy(ll_code, &dem_a->title[i], 8);
+		strncpy(&dem_a->title[i], "        ", 8);
+	}
+
+	/*
+	 * The longitude/latitude code in a DEM file is cryptic, and apparently is the
+	 * name of a corresponding paper map sheet.  It basically encodes the description
+	 * of a UTM grid.  It takes a form that is illustrated by the following example:
+	 *
+	 *        NL12-08W
+	 *
+	 * where I think the N simply means "Northern Hemisphere".
+	 * The L12 is a code that gives a 4 degree by 6 degree block.
+	 * Starting at the equator, with 'A', the letter represents 4 degree
+	 * chunks of latitude.  Thus 'L' represents the block from 44N to 48N.
+	 * The 12 is the UTM zone number.  The calculation "-186 + (6 * zone)"
+	 * gives the lower longitude of a 6 degree zone.  Thus, zone 12 represents
+	 * longitudes from -114 to -108 (108W to 114W).
+	 *
+	 * The 4 degree by 6 degree block is divided into 12 rectangular areas,
+	 * each of which contains an east and west chunk.  (The W tells us that
+	 * this is the western 1 degree by 1 degree block.)  The areas are numbered
+	 * as follows:
+	 *
+	 *       1       2        3
+	 *       4       5        6
+	 *       7       8        9
+	 *      10      11       12
+	 *
+	 * Area 1 defines the highest-latitude, highest-longitude
+	 * block which, in this case, spans 47N-48N and 112W-114W.
+	 * Area 12 defines the lowest-latitude, lowest-longitude
+	 * block which, in this case, spans 44N-45N and 108W-110W.
+	 * For our specific example code, area 08 is at 45N-46N and 110W-112W,
+	 * so NL12-08W is at 45N-46N and 111W-112W.
+	 *
+	 * I don't know if there is an equivalent code for the
+	 * southern hemisphere.  We assume here that there is, and
+	 * that it is a mirror image of the northern hemisphere code.
+	 */
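+	/*
+	 * Worked example, added for illustration, using the code NL12-08W and
+	 * the formulas implemented just below:
+	 *    latitude code 'L'   ->   sw_lat  = ('L' - 'A') * 4  =  44
+	 *    zone 12             ->   sw_long = -186 + 6 * 12    = -114
+	 *    area 08             ->   i = (8 - 1) / 3 = 2,   j = (8 + 2) % 3 = 1
+	 *                             sw_lat  = 44 + 3 - 2       =  45
+	 *    'W' half            ->   sw_long = -114 + 2 * 1 + 0 = -112
+	 * which is the 45N-46N, 111W-112W block described above.
+	 */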
+	latitude_code = ll_code[1];
+	dem_a->zone = strtol(&ll_code[2], (char **)0, 10);
+	location_code = strtol(&ll_code[5], (char **)0, 10);
+	e_w_code = ll_code[7];
+	dem_corners->sw_lat = (double)((latitude_code - 'A') * 4);
+	dem_corners->sw_long = -186.0 + (double)(dem_a->zone * 6);
+	i = (location_code - 1) / 3;
+	j = (location_code + 2) % 3;
+	dem_corners->sw_lat = dem_corners->sw_lat + 3.0 - (double)i;
+	dem_corners->sw_long = dem_corners->sw_long + (double)j * 2.0 + (e_w_code == 'W' ? 0.0 : 1.0);
+	if (ll_code[0] == 'S')  {
+		dem_corners->sw_lat = -dem_corners->sw_lat;
+	}
+	dem_corners->ne_lat = dem_corners->sw_lat + 1.0;
+	dem_corners->ne_long = dem_corners->sw_long + 1.0;
+	dem_corners->nw_lat = dem_corners->ne_lat;
+	dem_corners->nw_long = dem_corners->sw_long;
+	dem_corners->se_lat = dem_corners->sw_lat;
+	dem_corners->se_long = dem_corners->ne_long;
+
+	dem_corners->x = ONE_DEGREE_DEM_SIZE;
+	dem_corners->y = ONE_DEGREE_DEM_SIZE;
+
+
+	/*
+	 * If the DEM data don't overlap the image, then ignore them.
+	 *
+	 * If the user didn't specify latitude/longitude ranges for the image,
+	 * then we simply use this DEM to determine those boundaries.  In this
+	 * latter case, no overlap check is necessary (or possible) since the
+	 * image boundaries will be determined later.
+	 */
+	if (image_corners->sw_lat < image_corners->ne_lat)  {
+		/* The user has specified image boundaries.  Check for overlap. */
+		if ((dem_corners->sw_lat >= image_corners->ne_lat) || ((dem_corners->sw_lat + 1.0) <= image_corners->sw_lat) ||
+		    (dem_corners->sw_long >= image_corners->ne_long) || ((dem_corners->sw_long + 1.0) <= image_corners->sw_long))  {
+			return 1;
+		}
+	}
+
+	/*
+	 * Get the number of profiles in the data set.
+	 * For all states in the USA, except Alaska, this value should be 1201.
+	 * In Alaska, it can be 401 or 601.
+	 *
+	 * We use the number of profiles to calculate an interpolation step size,
+	 * which will be used to interpolate to fill out the dataset to 1201 by 1201 samples.
+	 */
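+	/*
+	 * For example (with ONE_DEGREE_DEM_SIZE at its usual value of 1201):
+	 * 1201 profiles give an interpolation step of 1 (no interpolation),
+	 * 601 profiles give a step of 2, and 401 profiles give a step of 3.
+	 */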
+	dem_size_x = dem_a->cols;
+	if ((dem_size_x != 401) && (dem_size_x != 601) && (dem_size_x != 1201))  {
+		fprintf(stderr, "Unexpected number of south-north profiles in DEM data: %d\n", dem_size_x);
+		exit(0);
+	}
+	interp_size = (ONE_DEGREE_DEM_SIZE - 1) / (dem_size_x - 1);
+
+	/*
+	 * Read in the entire DEM file into dem_corners->ptr.
+	 *
+	 * Each record we read is a south-to-north slice of the DEM block.  Successive records move from
+	 * west to east.  Thus, we read each profiles into a one-dimensional array, and then copy it
+	 * into the desired two-dimensional storage area, simulaneously rotating the data so that north
+	 * is at row zero and west is at column zero.
+	 */
+	dem_size_y = -1;
+	for (i = 0; i < ONE_DEGREE_DEM_SIZE; i = i + interp_size)  {
+		if ((ret_val = read_function(dem_fdesc, buf, 8 * DEM_RECORD_LENGTH)) < (DEM_RECORD_LENGTH - 4))  {
+			fprintf(stderr, "read from DEM file returns %d\n", ret_val);
+			exit(0);
+		}
+		if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r')) ret_val--;
+		if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r')) ret_val--;
+
+		if (dem_size_y < 0)  {
+			dem_size_y = strtol(&buf[12], (char **)0, 10);
+			if (dem_size_y != ONE_DEGREE_DEM_SIZE)  {
+				fprintf(stderr, "Number of rows in DEM file is %d, and should be %d.\n", dem_size_y, ONE_DEGREE_DEM_SIZE);
+				exit(0);
+			}
+
+			/*
+			 * Need to allocate space to store the DEM data.
+			 * This space must be freed by the calling function.
+			 */
+			dem_corners->ptr = (short *)malloc(sizeof(short) * ONE_DEGREE_DEM_SIZE * ONE_DEGREE_DEM_SIZE);
+			if (dem_corners->ptr == (short *)0)  {
+				fprintf(stderr, "malloc of dem_corners->ptr failed\n");
+				exit(0);
+			}
+		}
+
+		ptr = &buf[144];	/* Ignore header information on each block */
+
+		for (j = ONE_DEGREE_DEM_SIZE - 1; j >=0; j--)  {
+			if ((ptr - buf) > (ret_val - 6))  {
+				/* We are out of data.  Read some more. */
+				if ((ret_val = read_function(dem_fdesc, buf, 8 * DEM_RECORD_LENGTH)) < (DEM_RECORD_LENGTH - 4))  {
+					fprintf(stderr, "2 read from DEM file returns %d\n", ret_val);
+					exit(0);
+				}
+				if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r')) ret_val--;
+				if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r')) ret_val--;
+				ptr = buf;
+			}
+
+			*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i) = strtol((char *)ptr, (char **)&ptr, 10);
+			if (dem_a->elev_units == 1)  {
+				/*
+				 * The main body of drawmap likes to work in meters.
+				 * We satisfy that desire by changing feet into meters
+				 * before passing the data back.  (I think that all GEO
+				 * format DEMs are in meters, but we do the check, just
+				 * to be sure.)
+				 *
+				 * We alter the header information below, after all data
+				 * points have been processed.
+				 */
+				*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i) =
+					(short)round((double)*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i) * 0.3048);
+			}
+
+			/*
+			 * If there are fewer than 1201 south-north profiles, then interpolate to form
+			 * a full 1201x1201 dataset.  That way the program only needs to handle one
+			 * dataset size, and things are a lot easier.
+			 */
+			if ((interp_size > 1) && (i > 0))  {
+				if (interp_size == 2)  {
+					*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i - 1) = round(0.5 * (double)(*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i) +
+									       *(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i - 2)));
+				}
+				else  {
+					f = (double)(*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i) -
+						     *(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i - 3)) / 3.0;
+					g = (double)*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i - 3);
+					*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i - 2) = round(g + f);
+					*(dem_corners->ptr + j * ONE_DEGREE_DEM_SIZE + i - 1) = round(g + f + f);
+				}
+			}
+		}
+	}
+
+	/*
+	 * The main body of drawmap likes to work in meters.
+	 * We satisfy that desire by changing feet into meters
+	 * before passing the data back.
+	 *
+	 * Here we change the header information.  We already changed the
+	 * actual elevation data above.
+	 */
+	if (dem_a->elev_units == 1)  {
+		dem_a->elev_units = 2;
+	}
+
+	return 0;
+}
+
+
+
+/*
+ * Process a DEM file that uses the UTM Planimetric Reference System.
+ * These include 7.5-minute DEMs.
+ *
+ * This function returns 0 if it allocates memory and reads in the data.
+ * It returns 1 if it doesn't allocate memory.
+ */
+int
+process_utm_dem(int dem_fdesc, ssize_t (*read_function)(), struct image_corners *image_corners,
+		struct dem_corners *dem_corners, struct dem_record_type_a *dem_a, struct datum *dem_datum)
+{
+	long i, j, k;
+	double f, g;
+	long x, y;
+	short *sptr;
+	unsigned char buf[DEM_RECORD_LENGTH];
+	ssize_t ret_val;
+	long profile_rows, profile_columns;
+	long dem_size_x, dem_size_y;
+	double lat_min, long_min;
+	double lat_max, long_max;
+	double x_gp_min, y_gp_min;
+	double x_gp_max, y_gp_max;
+	double x_gp, y_gp;
+	long longest_profile = -1;
+	long easternmost_full_profile;
+	unsigned char save_byte;
+	struct profile  {
+		double x_gp;
+		double y_gp;
+		long num_samples;
+		short *data;
+	} *profiles;
+
+
+	/*
+	 * Drawmap doesn't contain code to handle anything other than south-to-north
+	 * profiles.  As far as I know, none of the data currently use anything else.
+	 * Check, just to be sure.
+	 */
+	if (dem_a->angle != 0.0)  {
+		fprintf(stderr, "DEM data oriented at a non-zero angle.  Ignoring file.\n");
+		return 1;
+	}
+
+
+	/*
+	 * Make sure that the UTM zone information isn't bogus.
+	 */
+	if ((dem_a->zone < 1) || (dem_a->zone > 60))  {
+		fprintf(stderr, "DEM file contains a bad UTM zone (%d).  File ignored.\n", dem_a->zone);
+		return 1;
+	}
+
+
+	/*
+	 * We need to find the location of the first elevation sample in the first
+	 * profile.  This procedure is laid out in detail in the DEM standards documents,
+	 * complete with nice pictures of the geometry, so I won't describe all of the
+	 * details here.  Basically, though, the samples are at UTM coordinates that are
+	 * evenly divisible by the 30-meter sample spacing (or divisible by 10 meters if
+	 * the sample spacing is 10 meters).  We need to find the first set of coordinates
+	 * that have round-numbered values just inside the SW corner.  The procedure varies
+	 * depending on whether the data block is west or east of the central meridian.
+	 *
+	 * Actually, we don't need to do this the hard way, since each profile header
+	 * contains the starting UTM coordinates of the profile.  However, the method is
+	 * worth encapsulating here in case we need to do something like it later.  The
+	 * method comes straight from the DEM standards documents.
+	 */
+//	if ((0.5 * (dem_a->sw_x + dem_a->se_x)) < 500000.0)  {
+//		/* West of central meridian. */
+//		sw_x = dem_a->x_res * ceil(dem_a->sw_x / dem_a->x_res);
+//		m = (dem_a->se_y - dem_a->sw_y) / (dem_a->se_x - dem_a->sw_x);
+//		b = dem_a->sw_y - m * dem_a->sw_x;
+//		sw_y = dem_a->y_res * ceil((b + m * sw_x) / dem_a->y_res);
+//	}
+//	else  {
+//		/* East of central meridian. */
+//		sw_x = dem_a->x_res * ceil(dem_a->nw_x / dem_a->x_res);
+//		m = (dem_a->nw_y - dem_a->sw_y) / (dem_a->nw_x - dem_a->sw_x);
+//		b = dem_a->sw_y - m * dem_a->sw_x;
+//		sw_y = dem_a->y_res * ceil((b + m * sw_x) / dem_a->y_res);
+//	}
+
+
+	/*
+	 * Convert UTM coordinates of corners into latitude/longitude pairs.
+	 */
+	(void)redfearn_inverse(dem_datum, dem_a->sw_x_gp, dem_a->sw_y_gp, dem_a->zone, &(dem_corners->sw_lat), &(dem_corners->sw_long));
+	(void)redfearn_inverse(dem_datum, dem_a->nw_x_gp, dem_a->nw_y_gp, dem_a->zone, &(dem_corners->nw_lat), &(dem_corners->nw_long));
+	(void)redfearn_inverse(dem_datum, dem_a->ne_x_gp, dem_a->ne_y_gp, dem_a->zone, &(dem_corners->ne_lat), &(dem_corners->ne_long));
+	(void)redfearn_inverse(dem_datum, dem_a->se_x_gp, dem_a->se_y_gp, dem_a->zone, &(dem_corners->se_lat), &(dem_corners->se_long));
+	dem_corners->sw_x_gp = dem_a->sw_x_gp; dem_corners->sw_y_gp = dem_a->sw_y_gp;
+	dem_corners->nw_x_gp = dem_a->nw_x_gp; dem_corners->nw_y_gp = dem_a->nw_y_gp;
+	dem_corners->ne_x_gp = dem_a->ne_x_gp; dem_corners->ne_y_gp = dem_a->ne_y_gp;
+	dem_corners->se_x_gp = dem_a->se_x_gp; dem_corners->se_y_gp = dem_a->se_y_gp;
+	// fprintf(stderr, "%g %g %d      %g %g\n", dem_corners->sw_x_gp, dem_corners->sw_y_gp, dem_a->zone, dem_corners->sw_lat, dem_corners->sw_long);
+	// fprintf(stderr, "%g %g %d      %g %g\n", dem_corners->se_x_gp, dem_corners->se_y_gp, dem_a->zone, dem_corners->se_lat, dem_corners->se_long);
+	// fprintf(stderr, "%g %g %d      %g %g\n", dem_corners->ne_x_gp, dem_corners->ne_y_gp, dem_a->zone, dem_corners->ne_lat, dem_corners->ne_long);
+	// fprintf(stderr, "%g %g %d      %g %g\n", dem_corners->nw_x_gp, dem_corners->nw_y_gp, dem_a->zone, dem_corners->nw_lat, dem_corners->nw_long);
+
+	/*
+	 * If the DEM data don't overlap the image, then ignore them.
+	 *
+	 * If the user didn't specify latitude/longitude ranges for the image,
+	 * then we simply use this DEM to determine those boundaries.  In this
+	 * latter case, no overlap check is necessary (or possible) since the
+	 * image boundaries will be determined later.
+	 *
+	 * Actually, no overlap check is needed, anyway, since the main routine
+	 * will ignore data that is out of bounds.  But we can save a whole
+	 * lot of processing if we can detect out-of-bounds data here.
+	 *
+	 * Because the quads are not rectangular, we might reject a quad that
+	 * slightly overlaps the image, but we won't reject one that overlaps
+	 * by very much.
+	 */
+	if (image_corners->sw_lat < image_corners->ne_lat)  {
+		/* The user has specified image boundaries.  Check for overlap. */
+		if ((dem_corners->sw_lat >= image_corners->ne_lat) || ((dem_corners->ne_lat) <= image_corners->sw_lat) ||
+		    (dem_corners->sw_long >= image_corners->ne_long) || ((dem_corners->ne_long) <= image_corners->sw_long))  {
+			return 1;
+		}
+	}
+
+	/*
+	 * Elevation samples are stored in south-to-north profiles that move
+	 * from west to east.
+	 *
+	 * The number of south-to-north profiles is specified in the main Type A record.
+	 * We have already parsed the Type A record, so we know this number.
+	 * We need to read some Type B records before
+	 * we will know the number of elevations in each profile.
+	 */
+	dem_size_x = dem_a->cols;
+
+
+	/*
+	 * We read in all of the data first.  The data is stored in a list of lists.
+	 * We begin by allocating space for a profiles[] array.  Each element of the array is
+	 * a structure that contains information about a given profile, including a pointer
+	 * to an array containing the actual elevation data for the profile.
+	 *
+	 * Later, we will convert this information into a two-dimensional array of elevation
+	 * data.  We follow this two-step process because the quads aren't, in general,
+	 * rectangular; but we would like to pass a rectangular array back to the calling
+	 * process for insertion into the image.  By reading in all of the data first, we
+	 * accumulate enough information to build an appropriate rectangular array, which
+	 * is of the right size to hold every data point, and with the right
+	 * parameters to represent the correct geometry.
+	 */
+	profiles = (struct profile *)malloc(sizeof(struct profile) * dem_size_x);
+	if (profiles == (struct profile *)0)  {
+		fprintf(stderr, "malloc of *profiles failed\n");
+		exit(0);
+	}
+
+
+	/*
+	 * The first and/or last profile of elevations are likely to be smaller than the
+	 * others because they can intersect the neatline at one end or the other.
+	 * (In fact, the first or last profile can actually contain zero elevations if
+	 * it intersects the neatlines at a corner of the quad.  The quads take the form
+	 * of quadrilaterals, which generally have no two sides of the same length.
+	 * The imaginary line segments that bound each quadrilateral are called the neatlines.)
+	 *
+	 * We have allocated temporary space for the profiles[] array.  Now we loop through
+	 * the profiles, allocating space for the actual elevation arrays, and reading in
+	 * and converting all profile data.
+	 */
+	for (i = 0; i < dem_size_x; i++)  {
+		/* Read in the first record of the profile.  It contains header information describing the profile. */
+		if ((ret_val = read_function(dem_fdesc, buf, DEM_RECORD_LENGTH)) < 144)  {
+			fprintf(stderr, "read from DEM file returns %d\n", ret_val);
+			exit(0);
+		}
+		if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r')) ret_val--;
+		if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r')) ret_val--;
+
+		/* Parse the relevant header information from the front of the record. */
+		save_byte = buf[18]; buf[18] = '\0'; profiles[i].num_samples = strtol(&buf[12], (char **)0, 10); buf[18] = save_byte;
+		save_byte = buf[24]; buf[24] = '\0'; profile_columns = strtol(&buf[18], (char **)0, 10); buf[24] = save_byte;
+		for (j = 24; j < 72; j++)  {
+			/* The DEM files use both 'D' and 'E' for exponentiation.  strtod() expects 'E' or 'e'. */
+			if (buf[j] == 'D') buf[j] = 'E';
+		}
+		save_byte = buf[48]; buf[48] = '\0'; profiles[i].x_gp = strtod(&buf[24], (char **)0); buf[48] = save_byte;
+		save_byte = buf[72]; buf[72] = '\0'; profiles[i].y_gp = strtod(&buf[48], (char **)0); buf[72] = save_byte;
+		profile_rows = profiles[i].num_samples;
+		if (profiles[i].num_samples > longest_profile)  {
+			longest_profile = profiles[i].num_samples;
+		}
+
+		/*
+		 * Drawmap assumes that each profile has 1 column.
+		 * As far as I know, all data files have this characteristic, but check just in case.
+		 */
+		if (profile_columns != 1)  {
+			fprintf(stderr, "DEM profile %d has %d columns.  Drawmap cannot parse it.  File ignored.\n", i, profile_columns);
+			return 1;
+		}
+
+		if (profile_rows <= 0)  {
+			profiles[i].data = (short *)0;
+			/*
+			 * This print statement is included merely because I want to find
+			 * example files that have profiles with 0 samples.
+			 */
+//			fprintf(stderr, "FYI:  Profile %d out of %d has %d rows.\n", i + 1, dem_size_x, profile_rows);
+			continue;
+		}
+
+		/* Allocate space for the profile elevation data. */
+		profiles[i].data = (short *)malloc(sizeof(short) * profile_rows);
+		if (profiles[i].data == (short *)0)  {
+			fprintf(stderr, "malloc of profiles[%d].data failed\n", i);
+			exit(0);
+		}
+
+		/* The first record in a profile has 144 bytes of header on the front, so skip over it. */
+		k = 144;
+		for (j = 0; j < profile_rows; j++)  {
+			if ((k > (ret_val - 6)) || (buf[k + 5] == ' '))  {
+				/* 
+				 * We have run out of data in this record.
+				 * We need to read in another one.
+				 */
+				if ((ret_val = read_function(dem_fdesc, buf, DEM_RECORD_LENGTH)) < 6)  {
+					fprintf(stderr, "2 read from DEM file returns %d\n", ret_val);
+					exit(0);
+				}
+				if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r')) ret_val--;
+				if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r')) ret_val--;
+				k = 0;
+			}
+
+			save_byte = buf[k + 6]; buf[k + 6] = '\0'; profiles[i].data[j] = strtol(&buf[k], (char **)0, 10); buf[k + 6] = save_byte;
+			k += 6;
+		}
+	}
+
+
+	/*
+	 * Now we have all of the data stored away in memory.
+	 * The next step is to calculate the size of the two-dimensional array
+	 * that we need to pass the data back to the calling function.
+	 * The dimensions of the array come from the maximum and minimum UTM
+	 * coordinates.  The latitude/longitude range spanned by the array comes
+	 * from the maximum and minimum latitude/longitude.
+	 *
+	 * We check all data points around the edges of the data to find
+	 * the largest and smallest latitudes and longitudes, and the largest
+	 * and smallest UTM coordinates, x_gp and y_gp.  (We currently make
+	 * no use of the latitude/longitude values, but calculate them anyway,
+	 * for use in algorithm development and debugging.)
+	 *
+	 * In general the largest (smallest) UTM coordinate doesn't appear to
+	 * necessarily coincide with the largest (smallest) latitude/longitude.
+	 * The code that follows contains (commented out) instrumentation
+	 * to find the (i, j) coordinates of the largest and smallest dimensions.
+	 *
+	 * Note that, while we calculate all of this data for possible future
+	 * use, not all of it is currently used.
+	 */
+	x_gp_min = 1.0e7;
+	y_gp_min = 1.0e8;
+	x_gp_max = -1.0e7;
+	y_gp_max = -1.0e8;
+	lat_min = 91.0;
+	long_min = 181.0;
+	lat_max = -91.0;
+	long_max = -181.0;
+	for (i = 0; i < dem_size_x; i++)  {
+		/* Check profiles along the western edge until we have checked a full-sized one. */
+		x_gp = profiles[i].x_gp;
+		y_gp = profiles[i].y_gp;
+		if (x_gp < x_gp_min)  {
+			x_gp_min = x_gp;
+//			x_gp_min_loc_i = i; x_gp_min_loc_j = -1;
+		}
+		if (x_gp > x_gp_max)  {
+			x_gp_max = x_gp;
+//			x_gp_max_loc_i = i; x_gp_max_loc_j = -1;
+		}
+
+		for (j = 0; j < profiles[i].num_samples; j++)  {
+			if (y_gp < y_gp_min)  {
+				y_gp_min = y_gp;
+//				y_gp_min_loc_i = i; y_gp_min_loc_j = j;
+			}
+			if (y_gp > y_gp_max)  {
+				y_gp_max = y_gp;
+//				y_gp_max_loc_i = i; y_gp_max_loc_j = j;
+			}
+			(void)redfearn_inverse(dem_datum, x_gp, y_gp, dem_a->zone, &f, &g);
+			if (f < lat_min)  {
+				lat_min = f;
+//				lat_min_loc_i = i; lat_min_loc_j = j;
+			}
+			if (f > lat_max)  {
+				lat_max = f;
+//				lat_max_loc_i = i; lat_max_loc_j = j;
+			}
+			if (g < long_min)  {
+				long_min = g;
+//				long_min_loc_i = i; long_min_loc_j = j;
+			}
+			if (g > long_max)  {
+				long_max = g;
+//				long_max_loc_i = i; long_max_loc_j = j;
+			}
+
+			y_gp += dem_a->y_res;
+		}
+		if (profiles[i].num_samples == longest_profile)  {
+			easternmost_full_profile = i;
+			break;
+		}
+	}
+	for ( ; i < dem_size_x; i++)  {
+		/* Check the two endpoints of each profile all the way to the eastern edge. */
+		if (profiles[i].x_gp < x_gp_min)  {
+			x_gp_min = profiles[i].x_gp;
+//			x_gp_min_loc_i = i; x_gp_min_loc_j = -1;
+		}
+		if (profiles[i].x_gp > x_gp_max)  {
+			x_gp_max = profiles[i].x_gp;
+//			x_gp_max_loc_i = i; x_gp_max_loc_j = -1;
+		}
+		if (profiles[i].y_gp < y_gp_min)  {
+			y_gp_min = profiles[i].y_gp;
+//			y_gp_min_loc_i = i; x_gp_min_loc_j = -1;
+		}
+		if (profiles[i].y_gp > y_gp_max)  {
+			y_gp_max = profiles[i].y_gp;
+//			y_gp_max_loc_i = i; x_gp_max_loc_j = -1;
+		}
+		(void)redfearn_inverse(dem_datum, profiles[i].x_gp, profiles[i].y_gp, dem_a->zone, &f, &g);
+		if (f < lat_min)  {
+			lat_min = f;
+//			lat_min_loc_i = i; lat_min_loc_j = 0;
+		}
+		if (f > lat_max)  {
+			lat_max = f;
+//			lat_max_loc_i = i; lat_max_loc_j = 0;
+		}
+		if (g < long_min)  {
+			long_min = g;
+//			long_min_loc_i = i; long_min_loc_j = 0;
+		}
+		if (g > long_max)  {
+			long_max = g;
+//			long_max_loc_i = i; long_max_loc_j = 0;
+		}
+
+		y_gp = profiles[i].y_gp + dem_a->y_res * (double)(profiles[i].num_samples - 1);
+		if (y_gp < y_gp_min)  {
+			y_gp_min = y_gp;
+//			y_gp_min_loc_i = i; x_gp_min_loc_j = -1;
+		}
+		if (y_gp > y_gp_max)  {
+			y_gp_max = y_gp;
+//			y_gp_max_loc_i = i; x_gp_max_loc_j = -1;
+		}
+		(void)redfearn_inverse(dem_datum, profiles[i].x_gp, y_gp, dem_a->zone, &f, &g);
+		if (f < lat_min)  {
+			lat_min = f;
+//			lat_min_loc_i = i; lat_min_loc_j = profiles[i].num_samples - 1;
+		}
+		if (f > lat_max)  {
+			lat_max = f;
+//			lat_max_loc_i = i; lat_max_loc_j = profiles[i].num_samples - 1;
+		}
+		if (g < long_min)  {
+			long_min = g;
+//			long_min_loc_i = i; long_min_loc_j = profiles[i].num_samples - 1;
+		}
+		if (g > long_max)  {
+			long_max = g;
+//			long_max_loc_i = i; long_max_loc_j = profiles[i].num_samples - 1;
+		}
+		if (profiles[i].num_samples == longest_profile)  {
+			easternmost_full_profile = i;
+		}
+	}
+	for (i = easternmost_full_profile; i < dem_size_x; i++)  {
+		x_gp = profiles[i].x_gp;
+		y_gp = profiles[i].y_gp + dem_a->y_res;
+		if (x_gp < x_gp_min)  {
+			x_gp_min = x_gp;
+//			x_gp_min_loc_i = i; x_gp_min_loc_j = -1;
+		}
+		if (x_gp > x_gp_max)  {
+			x_gp_max = x_gp;
+//			x_gp_max_loc_i = i; x_gp_max_loc_j = -1;
+		}
+
+		/* Check the profiles along the eastern edge. */
+		for (j = 1; j < (profiles[i].num_samples - 1); j++)  {
+			if (y_gp < y_gp_min)  {
+				y_gp_min = y_gp;
+//				y_gp_min_loc_i = i; y_gp_min_loc_j = j;
+			}
+			if (y_gp > y_gp_max)  {
+				y_gp_max = y_gp;
+//				y_gp_max_loc_i = i; y_gp_max_loc_j = j;
+			}
+			(void)redfearn_inverse(dem_datum, x_gp, y_gp, dem_a->zone, &f, &g);
+			if (f < lat_min)  {
+				lat_min = f;
+//				lat_min_loc_i = i; lat_min_loc_j = j;
+			}
+			if (f > lat_max)  {
+				lat_max = f;
+//				lat_max_loc_i = i; lat_max_loc_j = j;
+			}
+			if (g < long_min)  {
+				long_min = g;
+//				long_min_loc_i = i; long_min_loc_j = j;
+			}
+			if (g > long_max)  {
+				long_max = g;
+//				long_max_loc_i = i; long_max_loc_j = j;
+			}
+
+			y_gp += dem_a->y_res;
+		}
+	}
+//	fprintf(stderr, "\n");
+//	fprintf(stderr, "x_gp_min_loc_i = %d      x_gp_min_loc_j = %d\n", x_gp_min_loc_i, x_gp_min_loc_j);
+//	fprintf(stderr, "long_min_loc_i = %d      long_min_loc_j = %d\n", long_min_loc_i, long_min_loc_j);
+//	fprintf(stderr, "\n");
+//	fprintf(stderr, "y_gp_min_loc_i = %d      y_gp_min_loc_j = %d\n", y_gp_min_loc_i, y_gp_min_loc_j);
+//	fprintf(stderr, "lat_min_loc_i = %d      lat_min_loc_j = %d\n", lat_min_loc_i, lat_min_loc_j);
+//	fprintf(stderr, "\n");
+//	fprintf(stderr, "x_gp_max_loc_i = %d      x_gp_max_loc_j = %d\n", x_gp_max_loc_i, x_gp_max_loc_j);
+//	fprintf(stderr, "long_max_loc_i = %d      long_max_loc_j = %d\n", long_max_loc_i, long_max_loc_j);
+//	fprintf(stderr, "\n");
+//	fprintf(stderr, "y_gp_max_loc_i = %d      y_gp_max_loc_j = %d\n", y_gp_max_loc_i, y_gp_max_loc_j);
+//	fprintf(stderr, "lat_max_loc_i = %d      lat_max_loc_j = %d\n", lat_max_loc_i, lat_max_loc_j);
+//	fprintf(stderr, "\n");
+
+	/*
+	 * We now know the extent of the data in terms of both UTM coordinates and
+	 * latitude/longitude coordinates.
+	 *
+	 * This gives us everything we need to set up the data array that will be
+	 * returned to the calling function.
+	 *
+	 * We store the data in an array that is based on the UTM coordinates
+	 * since that is the native mode for the data.  The calling function
+	 * can decide whether to leave them that way or inverse project them
+	 * onto a latitude/longitude grid.
+	 */
+	if (dem_size_x != round(1.0 + (x_gp_max - x_gp_min) / dem_a->x_res))  {
+		fprintf(stderr, "Number of profiles (%d) in data does not match actual data extent (%d).  File ignored.\n",
+			dem_size_x, round(1.0 + (x_gp_max - x_gp_min) / dem_a->x_res));
+	}
+	dem_size_y = round(1.0 + (y_gp_max - y_gp_min) / dem_a->y_res);
+
+	/*
+	 * Set up the array to be returned to the calling function.
+	 */
+	dem_corners->x_gp_min = x_gp_min;
+	dem_corners->y_gp_min = y_gp_min;
+	dem_corners->x_gp_max = x_gp_max;
+	dem_corners->y_gp_max = y_gp_max;
+	dem_corners->x = dem_size_x;
+	dem_corners->y = dem_size_y;
+
+	//fprintf(stderr, "x=%d    y=%d    x_range=%.8g - %.8g    y_range=%.9g - %.9g    lat_range=%.7g - %.7g    long_range=%.8g - %.8g\n",
+	//	dem_size_x, dem_size_y, x_gp_min, x_gp_max, y_gp_min, y_gp_max, lat_min, lat_max, long_min, long_max);
+
+	dem_corners->ptr = (short *)malloc(sizeof(short) * dem_size_x * dem_size_y);
+	if (dem_corners->ptr == (short *)0)  {
+		fprintf(stderr, "malloc of dem_corners->ptr failed\n");
+		exit(0);
+	}
+
+
+	/*
+	 * Initialize the new array to HIGHEST_ELEVATION.
+	 */
+	for (j = 0; j < (dem_size_x * dem_size_y); j++)  {
+		(dem_corners->ptr)[j] = HIGHEST_ELEVATION;
+	}
+
+
+	/*
+	 * Now that we have a storage array, we need to stuff the parsed elevations from
+	 * the various profiles into the appropriate locations in the array.
+	 *
+	 * The data is stored such that, somewhere in the i==0 column of the array,
+	 * is/are the sample(s) with the lowest x_gp value (x_gp_min).  Somewhere in
+	 * the bottom row is/are the sample(s) with the lowest y_gp value (y_gp_min).
+	 * The rows and columns of the array are spaced by dem_a->y_res and dem_a->x_res,
+	 * respectively.  Thus, given the data in dem_corners, and in the array, the calling
+	 * function can reconstruct the exact x_gp and y_gp coordinates, and the elevation,
+	 * of each data point.  How the calling function maps this data into an image
+	 * is none of our concern.  Our job is merely to return the parsed data in a usable form.
+	 */
+	for (i = 0; i < dem_size_x; i++)  {
+		/* The profile doesn't have to begin at the lowest y_gp.  Find the starting offset. */
+		k = round((profiles[i].y_gp - y_gp_min) / dem_a->y_res);
+
+		for (j = 0; j < profiles[i].num_samples; j++)  {
+			sptr = (dem_corners->ptr + (dem_size_y - 1 - j - k) * dem_size_x + i);
+			if (*sptr != HIGHEST_ELEVATION)  {
+				fprintf(stderr, "FYI:  Overwrite in process_utm_dem at (%d, %d)\n", i, j);
+			}
+			*sptr = profiles[i].data[j];
+			if ((*sptr == 32767) || (*sptr == -32767))  {
+				/*
+				 * Some non-SDTS DEM files appear to mark non-valid data with
+				 * either the value 32767 or -32767.  At least for some files,
+				 * these may represent void_fill and/or edge_fill values, but
+				 * I don't know for sure.  One thing is certain:  they aren't valid
+				 * elevations.  Normally, in drawmap, we convert edge_fill values
+				 * into HIGHEST_ELEVATION, and convert void_fill to an elevation
+				 * of zero.  However, since it isn't clear what these values
+				 * represent, we convert them into HIGHEST_ELEVATION.
+				 * At least that way, the rest of drawmap doesn't have to deal
+				 * with this quirk.
+				 */
+				*sptr = HIGHEST_ELEVATION;
+			}
+			else if (dem_a->elev_units == 1)  {
+				/*
+				 * The main body of drawmap likes to work in meters.
+				 * We satisfy that desire by changing feet into meters
+				 * before passing the data back.
+				 *
+				 * We alter the header information below, after all data
+				 * points have been processed.
+				 */
+				*sptr = (short)round((double)*sptr * 0.3048);
+			}
+		}
+		free(profiles[i].data);
+	}
+	free(profiles);
+
+	/*
+	 * The main body of drawmap likes to work in meters.
+	 * We satisfy that desire by changing feet into meters
+	 * before passing the data back.
+	 *
+	 * Here we change the header information.  We already changed the
+	 * actual elevation data above.
+	 */
+	if (dem_a->elev_units == 1)  {
+		dem_a->elev_units = 2;
+	}
+
+
+
+// For debugging.
+//	for (i = 0; i < dem_size_x; i++)  {
+//		for (j = 0; j < dem_size_y; j++)  {
+//			if (*(dem_corners->ptr + j * dem_size_x + i) == HIGHEST_ELEVATION)  {
+//				fprintf(stderr, "FYI:  HIGHEST_ELEVATION at %d %d\n", i, j);
+//			}
+//		}
+//	}
+
+	return 0;
+}
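+
+
+/*
+ * A minimal illustrative sketch (not part of the original parsing logic) of
+ * how a caller might use the grid filled in above.  It assumes the
+ * dem_corners and dem_a structures produced by process_utm_dem():  column 0
+ * of the array corresponds to x_gp_min, row 0 to y_gp_max, and the grid is
+ * spaced by dem_a->x_res and dem_a->y_res.  The function name
+ * utm_grid_elevation is hypothetical; nothing in drawmap calls it.
+ */
+static short
+utm_grid_elevation(struct dem_corners *dem_corners, struct dem_record_type_a *dem_a,
+		double x_gp, double y_gp)
+{
+	long i, j;
+
+	/* Convert the UTM coordinates into array indices. */
+	i = (long)round((x_gp - dem_corners->x_gp_min) / dem_a->x_res);
+	j = (long)round((dem_corners->y_gp_max - y_gp) / dem_a->y_res);
+
+	if ((i < 0) || (i >= dem_corners->x) || (j < 0) || (j >= dem_corners->y))  {
+		/* Outside the grid.  Treat it like the fill value used above. */
+		return HIGHEST_ELEVATION;
+	}
+
+	return *(dem_corners->ptr + j * dem_corners->x + i);
+}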

Added: packages/drawmap/branches/upstream/current/dem.h
===================================================================
--- packages/drawmap/branches/upstream/current/dem.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/dem.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,660 @@
+/*
+ * =========================================================================
+ * dem.h - A header file to define parameters for DEM files.
+ * Copyright (c) 2000,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+
+#define DEM_RECORD_LENGTH	1024
+
+/*
+ * This is the structure of the data in a DEM Logical Record Type A.
+ * It is generally the first record in a DEM, before the actual data
+ * starts.  It is 1024 bytes long, but not all bytes are used.  The
+ * old format ends at byte 864 (numbered starting from 1).  The new
+ * format is basically the same, but adds additional data after byte 864.
+ * Normally, the record is padded with blanks to round it out to 1024
+ * bytes.  However, some people put a newline at the end of the valid
+ * data, and don't include the padding, in spite of the fact that the
+ * standards document specifically says to pad with blanks.
+ * (In fact, according to the standard, bytes 1021-1024 of every DEM
+ * record, of any type, should contain blanks.)
+ *
+ * No attempt has been made to make the sizes of individual variables in this
+ * structure match the sizes specified in the DEM specification.  (For example,
+ * using short integers for record elements specified as two-byte integers.)
+ * Memory isn't at such a premium that we need to use short instead of long, or float
+ * instead of double.
+ */
+struct dem_record_type_a  {
+	/*
+	 * The title block has somewhat variable contents, and does not always contain what is
+	 * officially specified.  With that warning, here is the official specification.
+	 * Note that bytes in the spec are numbered starting with 1 rather than 0, and that
+	 * what I call the title block occupies bytes 1 through 140.
+	 *	1-40: file name		(SDTS location:  IDEN/IDEN/TITL)
+	 *          (The SDTS subfield also normally contains the map scale, and typically looks like:
+	 *           "BOZEMAN, MT - 24000")
+	 *	41-80: free-format text	(SDTS location:  DQHL/DQHL/COMT, record 1)
+	 *	81-109: blank fill
+	 *	110-135: SE corner longitude/latitude in SDDDMMSS.SSSS format
+	 *          (SDTS location:  IDEN/IDEN/DAID)
+	 *          (The SDTS subfield also contains the map scale, and typically looks like:
+	 *           "LAT:: 45.625 LONG:: -111 SCALE:: 24000")
+	 *	136: process code (single integer)
+	 *             0= "Unspecified."
+	 *             1= "Autocorrelation RESAMPLE Simple bilinear."
+	 *             2= "Manual Profiling (GRIDEM) from stereomodels; Simple bilinear."
+	 *             3= "DLG/hypsography CTOG 8-direction linear."
+	 *             4= "Interpolation from photogrammetric system contours DCASS 8-direction linear."
+	 *             5= "DLG/hypsography LINETRACE, LT4X Complex linear."
+	 *             6= "DLG/hypsography CPS-3, ANUDEM, GRASS Complex polynomial."
+	 *             7= "Electronic imaging (non-photogrammetric), active or passive, sensor systems."
+	 *		(SDTS location:  DQHL/DQHL/COMT, record 2)
+	 *		(The SDTS subfield typically looks like:
+	 *		 "PROCESS CODE 1: PROCESS USED:  Autocorrelation RESAMPLE Simple bilinear.")
+	 *	137: blank fill
+	 *	138-140: sectional indicator, specific to 30-minute DEMs
+	 *       (SDTS location:  IDEN/IDEN/TITL)
+	 */
+	char	title[80];	// Includes the free-format text, if we choose to pry it out.  We usually don't so choose.
+	double	se_lat;
+	double	se_long;
+	long	process_code;
+//	char	sectional_ind[3];
+	/*
+	 * Free format 4-byte origin code.
+	 *  NMD = "DEM PRODUCER: National Mapping Division, Reston, VA."
+	 *  EMC = "DEM PRODUCER: Eastern Mapping Center (Mapping Applications Center), Reston, VA."
+	 *  WMC = "DEM PRODUCER: Western Mapping Center, Menlo Park, CA."
+	 * MCMC = "DEM PRODUCER: Mid-Continent Mapping Center, Rolla, MO."
+	 * RMMC = "DEM PRODUCER: Rocky Mountain Mapping Center, Denver, CO."
+	 *   FS = "DEM PRODUCER: Forest Service"
+	 * GPM2 = "DEM PRODUCER: Gestalt Photo Mapper low resolution DEM."
+	 * CONT = "DEM PRODUCER: Contractor."
+	 * <  > = "DEM PRODUCER: <  >."
+	 * blank= "DEM PRODUCER: Unspecified."
+	 *
+	 * (SDTS location:  DQHL/DQHL/COMT, record 3)
+	 * (The SDTS subfield typically looks like:
+	 *  "DEM PRODUCER:  Unspecified.")
+	 */
+	char	origin_code[4];	// bytes 141-144
+	/*
+	 * DEM level.  Can take values from 1 through 4.
+	 *
+	 * (SDTS location:  DQHL/DQHL/COMT, record 4.  Also DQPA/DQPA/COMT, record 1.)
+	 * (The SDTS subfield typically looks like:
+	 *  "DEM LEVEL 1 means: DEM created by auto correlation or manual profiling from aerial photographs.  Source photography is typically from National Aerial Photography Program or National High Altitude Photography Program.  30-minute DEM's may be derived or resampled from level 1 7.5-minute DEM's."
+	 */
+	long	level_code;	// bytes 145-150
+	/*
+	 * 1=regular.  2=random, reserved for future use.
+	 *
+	 * (SDTS location:  RSDF/RSDF/OBRP)
+	 * (The SDTS subfield typically looks like:
+	 *  "G2"
+	 *  Note that the conversion spec says this should be "G2" for regular, and "" for random.
+	 *  I think that this basically means random is still unused and reserved for future use.)
+	 */
+	long	elevation_pattern;	// bytes 151-156
+	/*
+	 * Planimetric reference system.
+	 *   0=Geographic (lat/long)
+	 *   1=Universal Transverse Mercator (UTM)
+	 *   2=State Plane Coordinate System
+	 *   3-20 defined in spec.
+	 *
+	 * (SDTS location:  XREF/XREF/RSNM)
+	 * (The SDTS subfield typically looks like:
+	 *  "UTM"
+	 *  Note:  valid SDTS values are "UTM", "GEO", and "SPCS".  Codes 3-20 are apparently unsupported.)
+	 */
+	long	plane_ref;	// bytes 157-162
+	/*
+	 * This is the zone for state plane and UTM systems.  It is zero for the geographic system.
+	 *
+	 * (SDTS location:  XREF/XREF/ZONE)
+	 * (The SDTS subfield typically looks like:
+	 *  "12")
+	 */
+	long	zone;	// bytes 163-168
+	/*
+	 * Map projection parameters are in bytes 169-528.  There are 15 fields of
+	 * floating-point data.  Since these field are set to zero when plane_ref = 0,1,2
+	 * we don't normally care much about them.  In SDTS, they aren't encoded.
+	 */
+//	double p1, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, p15;
+	/*
+	 * Unit of measure for planimetric coordinates.
+	 *  0=radians
+	 *  1=feet
+	 *  2=meters
+	 *  3=arc-seconds
+	 *
+	 * (These aren't encoded in SDTS.  SDTS simply requires that the units
+	 * be meters for UTM and decimal degrees for geographic units.)
+	 */
+	long plane_units;	// bytes 529-534
+	/*
+	 * Unit of measure for elevation coordinates.
+	 * 1=feet
+	 * 2=meters
+	 *
+	 * (SDTS location:  DDSH/DDSH/UNIT.  Also in DDOM/DDOM/ADMU, in association with individual data items, where it may be in lower case.)
+	 * (The SDTS subfield typically looks like:
+	 *  "METERS".
+	 *  Valid SDTS values are "FEET" and "METERS".)
+	 */
+	long elev_units;	// bytes 535-540
+	/*
+	 * Number of sides of the bounding polygon for the DEM file.
+	 * Presently, appears to always be set to 4.
+	 * This is not encoded in SDTS since it is always 4.
+	 */
+//	long num_sides;	// bytes 541-546
+	/*
+	 * Four pairs of floating point numbers defining the four corners of the
+	 * bounding quadrangle.  Clockwise from sw corner.  Stored as eastings and
+	 * northings (with false eastings/northings included).  Bytes 547-738
+	 *
+	 * (SDTS location:  SPDM/DMSA/X, SPDM/DMSA/Y)
+	 * (The SDTS subfield typically looks like:
+	 *  "490255.500000")
+	 * (There is also information in SPDM/SPDM/DTYP indicating that the 4 points form a RING.)
+	 */
+	double sw_x_gp;	// (the _gp stands for ground planimetric)
+	double sw_y_gp;
+	double nw_x_gp;
+	double nw_y_gp;
+	double ne_x_gp;
+	double ne_y_gp;
+	double se_x_gp;
+	double se_y_gp;
+	/*
+	 * A pair of floating point numbers giving the minimum and maximum elevations.
+	 * Bytes 739-786
+	 *
+	 * (SDTS location:  DDOM/DDOM/DVAL.  The two values normally seem to be in records
+	 * 3 and 4.  They can also be distinguished by looking at DDOM/DDOM/RAVA, which will
+	 * contain "MIN" or "MAX".)
+	 * (The SDTS subfield typically looks like:
+	 *  "1367")
+	 */
+	long min_elev;
+	long max_elev;
+	/*
+	 * Angle between primary axis of ground planimetric reference and primary
+	 * axis of DEM local reference system.
+	 * Should always be zero for arc-second-structured DEMs.  Is probably zero
+ * for UTM-structured DEMs, but could be 90 degrees if some non-USGS producer
+ * decided to store the profiles as rows rather than columns.
+	 * The USGS apparently always sets this to zero.  In SDTS it is not encoded.
+	 */
+	double angle;	// bytes 787-810
+	/*
+	 * Accuracy code for elevations.
+	 * 0=unknown
+	 * 1=accuracy information is given in logical record type C.
+	 *
+	 * (SDTS location:  DQPA/DQPA/COMT, record 2)
+	 * (The SDTS subfield typically looks like:
+	 *  "ACCURACY: RMSE of the DEM data relative to the file's datum (x,y,z) is (0, 0, 3); accuracy has been calculated based on a sample size of 20."
+	 *  or
+	 *  "ACCURACY: RMSE of the file's datum relative to the absolute datum (x,y,z) is (0, 0, 0); accuracy is estimated, not computed.  RMSE of the DEM data relative to the file's datum (x,y,z) is (0, 0, 1); accuracy has been calculated based on a sample size of 30."
+	 *  or
+	 *  "ACCURACY: Unspecified."
+	 * Actually, this field is pretty variable in appearance, and hard to pry data out of reliably.)
+	 */
+	long accuracy;	// bytes 811-816
+	/*
+	 * Three-element array of spatial-resolution units.  These are only permitted to
+	 * be integer units, but are stored as floating point.  For a 1-degree DEM, they
+	 * are typically 3,3,1.  For non-Alaska 7.5min dem, they are typically 30,30,1 or 10,10,1.
+	 * Bytes 817-852.
+	 *
+	 * (SDTS location:  IREF/IREF/XHRS, IREF/IREF/YHRS, DDSH/DDSH/PREC)
+	 * (The SDTS subfield typically looks like:
+	 *  "30.000000")
+	 */
+	double x_res;
+	double y_res;
+	double z_res;
+	/*
+	 * Number of rows and columns (m,n) of profiles in the DEM.
+	 * The standard isn't really clear on what happens if m != 1.
+	 * However, it says that, when m == 1, then n is equal to the
+	 * number of rows in DEM file.  In my experience, m has always
+	 * been equal to 1 in the main DEM header.  The true column values
+	 * appear at the beginning of each profile.
+	 * Bytes 853-864.
+	 *
+	 * (SDTS location:  LDEF/LDEF/NROW, LDEF/LDEF/NCOL.  Also in RSDF/RSDF/RWXT, RSDF/RSDF/CLXT)
+	 * (The SDTS subfield typically looks like:
+	 *  "325")
+	 */
+	long cols;
+	long rows;
+	/*
+	 * This concludes the info in the old format.
+	 * The new format tacks on some extra fields.
+	 * The new fields follow.
+	 */
+	/*
+	 * Largest primary contour interval.
+	 * (Present only if two or more primary intervals exist (level 2 DEMs only).)
+	 * Bytes 865-869.
+	 *
+	 * This is apparently only rarely present.  Haven't found an example yet.
+	 */
+//	long largest_contour;
+	/*
+	 * Source contour interval units.
+	 * Corresponds to the units of the map's largest primary contour interval (level 2 DEMs only).
+	 * Byte 870.
+	 *  0=NA
+	 *  1=feet
+	 *  2=meters
+	 *
+	 * This is apparently only rarely present.  Haven't found an example yet.
+	 */
+//	long largest_contour_units;
+	/*
+	 * Smallest primary contour interval.
+	 * Smallest or only primary contour interval (level 2 DEMs only).
+	 * Bytes 871-875.
+	 *
+	 * (SDTS location:  DQPA/DQPA/COMT, record 3.
+	 * (The SDTS subfield typically looks like:
+	 *  "CONTOUR INTERVAL:  Unspecified."
+	 *  or
+	 *  "CONTOUR INTERVAL:Primary contour interval of source is 20 feet.")
+	 */
+//	long smallest_contour;
+	/*
+	 * Source contour interval units.
+	 * Corresponds to the units of the map's smallest primary contour interval (level 2 DEMs only).
+	 * Byte 876.
+	 *  1=feet
+	 *  2=meters
+	 *
+	 * (In SDTS, the value is part of the text string for the contour itself.  See entry for smallest_contour.)
+	 */
+//	long smallest_contour_units;
+	/*
+	 * Data source date.
+	 * "YYYY" 4 character year, e.g. 1975, 1997, 2001, etc.
+	 * Synonymous with the original compilation date and/or the
+	 * date of the photography.
+	 * Bytes 877-880
+	 *
+	 * (SDTS location:  IDEN/IDEN/MPDT for map date.)
+	 * (The SDTS subfield typically looks like:
+	 *  "1999" or "19971121" and may not be present at all)
+	 *
+	 * (Also appears in DQHL/DQHL/COMT, record 5, in the
+	 * form: "SOURCE DATE OF PUBLISHED MAP OR PHOTOGRAPHY:  Unspecified"
+	 *   or: "SOURCE DATE OF PUBLISHED MAP OR PHOTOGRAPHY:  1981.")
+	 *
+	 * Apparently you are supposed to choose the latest of the available dates.
+	 */
+//	long source_date;
+	/*
+	 * Data inspection and revision date.
+	 * "YYYY" 4 character year.
+	 * Synonymous with the date of completion and/or the date of revision.
+	 * Bytes 881-884.
+	 * (SDTS location:  DQHL/DQHL/COMT, record 6, for inspection/revision date.)
+	 * (The SDTS subfield typically looks like:
+	 *  "INSPECTION FLAG: I, DATE THAT DEM WAS INSPECTED ON A DEM EDIT SYSTEM:  1999. RMSE computed from test points. Water body edits done. Visual inspection on DEM edit system and errors edited."
+	 *  and I guess you are supposed to pry the data out of this string.  You are supposed to find the latest
+	 *  available date, so there may be multiple records, or (more likely) there may sometimes be multiple
+	 *  dates embedded within this record.)
+	 */
+//	long revision_date;
+	/*
+	 * Inspection flag.
+	 * "I" indicates all processes of part3, Quality Control have been performed.
+	 * "R" indicates existing DEM has been revised and re-archived.
+	 * Byte 885.
+	 *
+	 * (SDTS location:  DQHL/DQHL/COMT, record 6)
+	 * (The SDTS subfield typically looks like:
+	 *  "INSPECTION FLAG: I, DATE THAT DEM WAS INSPECTED ON A DEM EDIT SYSTEM:  1999. RMSE computed from test points. Water body edits done. Visual inspection on DEM edit system and errors edited.")
+	 */
+//	long inspection_flag;
+	/*
+	 * Data validation flag.
+	 *  0 = No validation performed.
+	 *  1 = RMSE computed from test points, no quantitative test, no interactive DEM editing or review.
+	 *  2 = Batch process water body edit and RMSE computation.
+	 *  3 = Review and edit, including water edit.  No RMSE computed from test points.
+	 *  4 = Level 1 DEM's reviewed and edited.  Includes water body editing.  RMSE computed from test points.
+	 *  5 = Level 2 and 3 DEM's reviewed and edited.  Includes water body editing and verification or vertical integration of planimetric categories (other than hypsography or hydrography if authorized).  RMSE computed from test points.
+	 * Byte 886.
+	 *
+	 * (SDTS location:  DQHL/DQHL/COMT, record 7)
+	 * (The SDTS subfield typically looks like:
+	 *  "DATA VALIDATION FLAG: 5; Level 2 and 3 DEM's reviewed and edited.  Includes water body editing and verification or vertical integration of planimetric categories (other than hypsography or hydrography if authorized).  RMSE computed from test points."
+	 */
+//	long validation_flag;
+	/*
+	 * Suspect and void area flag.
+	 *  0 = none
+	 *  1 = suspect areas
+	 *  2 = void areas
+	 *  3 = suspect and void areas.
+	 * Byte 887-888.
+	 *
+	 * (SDTS location:  DQPA/DQPA/COMT, record 4, for suspect areas.)
+	 * (The SDTS subfield typically looks like:
+	 *  "SUSPECT AREAS:No suspect areas."
+	 *  or
+	 *  "SUSPECT AREAS:Suspect areas exist in the data.")
+	 *
+	 * (SDTS location:  DQCG/DQCG/COMT, record 1, for void areas.)
+	 * (The SDTS subfield typically looks like:
+	 *  "VOID AREAS:No void areas."
+	 *  or
+	 *  "VOID AREAS:Void areas exist in the data.")
+	 */
+//	long suspect_and_void_flag;
+	/*
+	 * Vertical datum.
+	 *  1 = local mean sea level
+	 *  2 = National Geodetic Vertical Datum 1929 (NGVD 29)
+	 *  3 = North American Vertical Datum 1988 (NAVD 88).
+	 * Byte 889-890.
+	 *
+	 * (SDTS location:  XREF/VATT/VDAT)
+	 * (The SDTS subfield typically looks like:
+	 *  "NGVD")
+	 * (Valid SDTS values are "LMSL", "NGVD", and "NAVD")
+	 */
+	long vertical_datum;
+	/*
+	 * Horizontal datum.
+	 *  1 = North American Datum 1927 (NAD 27)
+	 *  2 = World Geodetic System 1972 (WGS 72)
+	 *  3 = WGS 84
+	 *  4 = NAD 83
+	 *  5 = Old Hawaii Datum
+	 *  6 = Puerto Rico Datum
+	 * Bytes 891-892.
+	 *
+	 * (SDTS location:  XREF/XREF/HDAT)
+	 * (The SDTS subfield typically looks like:
+	 *  "NAS")
+	 * (Valid SDTS values are "NAS", "WGC", "WGE", "NAX", "OHD", and "PRD",
+	 * corresponding directly to the 6 numbers in the "classic" DEM format.)
+	 */
+	long horizontal_datum;
+	/*
+	 * Data Edition.  01-99.  Primarily a DMA-specific field.  (For USGS use, set to 01).
+	 * Bytes 893-896.
+	 *
+	 * (SDTS location:  DQHL/DQHL/COMT, record ?)
+	 * Not certain where or how this appears, when it is present.  For the USGS, it is
+	 * always 01 and is not encoded in SDTS.
+	 */
+//	long data_edition;
+	/*
+	 * Percent Void.
+	 * If the Suspect and Void Area Flag indicates a void, this field (right justified)
+	 * contains the percentage of nodes in the file set to void (-32,767).
+	 * Bytes 897-900.
+	 *
+	 * (SDTS location:  DQCG/DQCG/COMT, record ?)
+	 * Not certain where or how this appears, when it is present.  Probably tacked onto
+	 * the end of the COMT in record 1, in the form: "##% of nodes in the data are set to void.")
+	 */
+//	long percent_void;
+	/*
+	 * Edge Match Flag.
+	 * Edge match status flag.  Ordered West, North, East, and South.  Described in
+	 * the DEM standards document.
+	 * Bytes 901-908.
+	 *
+	 * (SDTS location:  DQLC/DQLC/COMT, record 1)
+	 * (The SDTS subfield typically looks like:
+	 *  "EDGE MATCH STATUS: West  (1), North (1), East (1), South (1) Edge matching is a process of matching elevation values along common quadrangle edges. The objective of edge matching is to improve the alignment of ridges and drains, and overall topographic shaping and representation. Code of 0 = not edge matched; 1 = edge match checked and joined; 2 = not edge matched because adjoining DEM is on a different horizontal or vertical datum; 3 = not edge matched because the adjoining DEM is not part of the current project; 4 = not edge matched because the adjoining DEM has a different vertical unit.")
+	 */
+//	long edge_match_flag;
+	/*
+	 * Vertical Datum Shift.
+	 * Value is in the form of SFFF.DD
+	 * Value is the average shift value for the four quadrangle corners obtained from
+	 * program VERTCON.  Always add this value to convert to NAVD88.
+	 * Bytes 909-915.
+	 *
+	 * (SDTS location:  XREF/XREF/COMT)
+	 * (The SDTS subfield typically looks like:
+	 *  "Vertical datum shift= 1.220000; always add to convert from National Geodetic Vertical Datum 1929 to North American Vertical Datum 1988.")
+	 */
+	double vertical_datum_shift;
+	/*
+	 * These two elements are not present in the DEM Type A profile.  They have been
+	 * added to accommodate SDTS files.  (SDTS files differ from the old DEM format
+	 * in that the profiles run in rows from west to east, and each row is padded
+	 * out to full length, so that a single (x_gp, y_gp) pair is sufficient to
+	 * fix every elevation sample in space.)
+	 */
+	double x_gp_first;
+	double y_gp_first;
+	/*
+	 * These two elements are not present in the DEM Type A profile.  They have been
+	 * added to accommodate SDTS files.  (SDTS files differ from the old DEM format
+	 * in that the profiles run in rows from west to east, and each row is padded
+	 * out to full length.  These two values are the filler that are used to fill
+	 * unknown voids within the valid data, and to pad the edges of the data to
+	 * make the grid rectangular.)
+	 */
+	long void_fill;
+	long edge_fill;
+};
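+
+/*
+ * A minimal sketch (illustrative only) of how a few of the fixed-width
+ * fields described above might be pulled out of a 1024-byte `classic'
+ * Type A record.  The byte positions come from the comments above; the spec
+ * numbers bytes from 1, so byte 157 lives at buf[156].  parse_field() is a
+ * hypothetical helper rather than something drawmap actually defines; the
+ * real parsing is done in dem.c.
+ *
+ *	static long
+ *	parse_field(unsigned char *buf, int start, int len)	// start is 1-based, as in the spec
+ *	{
+ *		char tmp[32];
+ *
+ *		memcpy(tmp, buf + start - 1, len);
+ *		tmp[len] = '\0';
+ *		return strtol(tmp, (char **)0, 10);
+ *	}
+ *
+ *	dem_a->plane_ref  = parse_field(buf, 157, 6);	// planimetric reference system
+ *	dem_a->zone       = parse_field(buf, 163, 6);	// UTM/state-plane zone
+ *	dem_a->elev_units = parse_field(buf, 535, 6);	// 1 = feet, 2 = meters
+ */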
+
+
+
+/*
+ * This is the structure of the data in a DEM Logical Record Type B.
+ * It generally follows the first record in a DEM (the Type A record),
+ * and contains an actual elevation profile.
+ * It is nominally 1024 bytes long, with blank-padding at the end;
+ * but multiple 1024-byte chunks are generally required to contain an
+ * entire elevation profile.
+ *
+ * We don't currently use this structure, but it is included here
+ * so that the contents of the record can be documented.
+ */
+struct dem_record_type_b  {
+	/*
+	 * A two-element array containing the row and column identification number of the DEM profile
+	 * contained in this record.  The row and column numbers may range from 1 to m and 1 to n.
+	 * The row number is normally set to 1.  The column identification is the profile sequence number.
+	 * Bytes 1-12.
+	 *
+	 * (SDTS location:  CEL0/CELL/ROWI, CEL0/CELL/COLI)
+	 * (The SDTS subfield typically looks like:
+	 *  "2")
+	 */
+	long row_number;
+	long column_number;
+	/*
+	 * A two-element array containing the number (m, n) of elevations in the DEM profile.
+	 * The first element in the field corresponds to the number of rows of nodes in this
+	 * profile.  The second element is set to 1, specifying 1 column per B record.
+	 * Bytes 13-24.
+	 *
+	 * Not encoded in SDTS because all rows/columns are padded to the same size.
+	 */
+	long rows;
+	long columns;
+	/*
+	 * A two-element array containing the ground planimetric coordinates (x_gp, y_gp) of the first elevation
+	 * in the profile.
+	 * Bytes 25-72.
+	 *
+	 * Not encoded in SDTS, on a row-by-row basis, because all rows/columns are padded to the same size.
+	 * The values for the very first sample, at the northwest corner, can be obtained from
+	 * RSDF/SADR/X, RSDF/SADR/Y.  The locations of all other samples in the rectangular array
+	 * can be derived from this one location by using the x and y resolution values.
+	 */
+	double x_gp;
+	double y_gp;
+	/*
+	 * Elevation of local datum for the profile.
+	 * The values are in units of measure given by the elev_units element in the Type A record.
+	 * Bytes 73-96.
+	 *
+	 * Not encoded in SDTS.
+	 */
+	double datum_elev;
+	/*
+	 * A two-element array of minimum and maximum elevations for the profile.
+	 * The values are in the units of measure given by the elev_units element in the Type A record.
+	 * Bytes 97-144.
+	 *
+	 * Not encoded in SDTS.
+	 */
+	double min_elev;
+	double max_elev;
+	/*
+	 * An (m, n) array of elevations for the profile.  Elevations are expressed in units of resolution.
+	 * A maximum of six characters for each integer elevation value.  A value in this array would be
+	 * multiplied by the "z" spatial resolution (data element z_res in record Type A) and added to the
+	 * datum_elev value for this profile to obtain the elevation for the point.
+	 * Bytes: 6 per value; at most 146 values in the first 1024-byte block and 170 in each subsequent block.
+	 *
+	 * (SDTS location:  CEL0/CVLS/ELEVATION
+	 * (The SDTS subfield typically looks like:
+	 *  a 16-bit binary value, which must be converted to binary and properly swabbed.)
+	 */
+//	long *elevations;
+};
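+
+/*
+ * A minimal worked example of the elevation encoding described above (for
+ * the `classic', non-SDTS format):  the true elevation of the k-th value in
+ * the profile is
+ *
+ *	elevation = datum_elev + z_res * elevations[k]
+ *
+ * where z_res comes from the Type A record and datum_elev comes from this
+ * Type B record.  In SDTS transfers the elevations arrive instead as 16-bit
+ * binary values in CEL0/CVLS/ELEVATION (see the comment above).
+ */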
+
+
+
+/*
+ * This is the structure of the data in a DEM Logical Record Type C.
+ * The standards document doesn't clearly state (as near as I can tell)
+ * where the Type C record would appear in a file, although it does
+ * mention it in the context of a file "header".  However, inspection
+ * of a sample file shows that it is at the end of the file.
+ * It should be 1024 bytes long, since all DEM records are supposed
+ * to be blank-padded to 1024 bytes.
+ *
+ * We don't currently use this structure, but it is included here
+ * so that the contents of the record can be documented.
+ *
+ * In SDTS all of the useful values are encoded within long text
+ * strings in DQPA/DQPA/COMT.  See the accuracy description in
+ * connection with the type A record, above, for typical text strings.
+ */
+struct dem_record_type_c  {
+	/*
+	 * Code indicating availability of statistics in datum_rmse.
+	 * 1 = available, 0 = unavailable.
+	 * Bytes 1-6.
+	 *
+	 * Not encoded in SDTS.
+	 */
+	long	datum_stats_flag;
+	/*
+	 * RMSE of file's datum relative to absolute datum (x, y, z).
+	 * RMSE integer values are in the same unit of measure given by
+	 * elements plane_units and elev_units of record Type A.
+	 * Bytes 7-24.
+	 */
+	long datum_rmse_x;
+	long datum_rmse_y;
+	long datum_rmse_z;
+	/*
+	 * Sample size on which statistics in datum_rmse are based.
+	 * If 0, then accuracy will be assumed to be estimated rather than computed.
+	 * Bytes 25-30.
+	 */
+	long datum_sample_size;
+	/*
+	 * Code indicating availability of statistics in dem_data_rmse.
+	 * 1 = available, 0 = unavailable.
+	 * Bytes 31-36.
+	 */
+	long dem_stats_flag;
+	/*
+	 * RMSE of DEM data relative to file's datum (x, y, z).
+	 * RMSE integer values are in the same units of measure given by
+	 * elements plane_units and elev_units of record Type A.
+	 * Bytes 37-54.
+	 */
+	long dem_rmse_x;
+	long dem_rmse_y;
+	long dem_rmse_z;
+	/*
+	 * Sample size on which statistics in dem_rmse are based.
+	 * If 0, then accuracy will be assumed to be estimated rather than computed.
+	 * Bytes 55-60.
+	 */
+	long dem_sample_size;
+};
+
+
+/*
+ * This structure is for passing information about a block of DEM data
+ * between routines.  It defines the corners and overall extent of the data
+ * block, in terms of both UTM (ground planimetric) coordinates and
+ * latitude/longitude.  It also defines the
+ * x-by-y size of the block, in terms of number of data points.
+ */
+struct dem_corners  {
+	short *ptr;		// A pointer to the block of memory containing the data
+
+	double sw_x_gp;		// UTM x for sw corner	(the _gp stands for ground planimetric coordinates)
+	double sw_y_gp;		// UTM y for sw corner
+	double nw_x_gp;		// UTM x for nw corner
+	double nw_y_gp;		// UTM y for nw corner
+	double ne_x_gp;		// UTM x for ne corner
+	double ne_y_gp;		// UTM y for ne corner
+	double se_x_gp;		// UTM x for se corner
+	double se_y_gp;		// UTM y for se corner
+
+	double sw_lat;		// latitude of sw corner
+	double sw_long;		// longitude of sw corner
+	double nw_lat;		// latitude of nw corner
+	double nw_long;		// longitude of nw corner
+	double ne_lat;		// latitude of ne corner
+	double ne_long;		// longitude of ne corner
+	double se_lat;		// latitude of se corner
+	double se_long;		// longitude of se corner
+
+	double x_gp_min;	// smallest UTM x
+	double y_gp_min;	// smallest UTM y
+	double x_gp_max;	// largest UTM x
+	double y_gp_max;	// largest UTM y
+
+	long x;			// number of samples in a row
+	long y;			// number of samples in a column
+};
+
+
+extern void parse_dem_a(unsigned char *, struct dem_record_type_a *, struct datum *);
+extern int parse_dem_sdts(unsigned char *, struct dem_record_type_a *, struct dem_record_type_c *, struct datum *, long);
+extern void print_dem_a(struct dem_record_type_a *);
+extern void print_dem_a_c(struct dem_record_type_a *, struct dem_record_type_c *);
+extern int process_geo_dem(int, ssize_t (*)(), struct image_corners *, struct dem_corners *, struct dem_record_type_a *, struct datum *datum);
+extern int process_utm_dem(int, ssize_t (*)(), struct image_corners *, struct dem_corners *, struct dem_record_type_a *, struct datum *datum);
+extern int process_sdts_dem(unsigned char *, struct image_corners *, struct dem_corners *, struct dem_record_type_a *, struct datum *datum);
+extern int process_gtopo30(char *, struct image_corners *, struct dem_corners *, struct dem_record_type_a *, struct datum *datum, long);

Added: packages/drawmap/branches/upstream/current/dem_sdts.c
===================================================================
--- packages/drawmap/branches/upstream/current/dem_sdts.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/dem_sdts.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,1683 @@
+/*
+ * =========================================================================
+ * dem_sdts.c - Routines to handle DEM data.
+ * Copyright (c) 2000,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+#include <errno.h>
+#include "drawmap.h"
+#include "dem.h"
+#include "sdts_utils.h"
+
+
+/*
+ * The routines in this file are uniquely-dedicated to handling
+ * DEM files in the Spatial Data Transfer System (SDTS) format.
+ *
+ * For a general description of SDTS, see sdts_utils.c.
+ * For a description of the `classic' DEM format, and a discussion of
+ * where all of the data has been moved to in the SDTS format, see dem.h.
+ *
+ * The routines in this file are fairly repetitive.  They open an SDTS
+ * file, find the few things we care about, close the SDTS file,
+ * and move on to the next SDTS file.  Thus, there isn't a lot of
+ * unique code in this file, just a lot of tedious searching for data.
+ */
+
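+
+/*
+ * A minimal sketch of the pattern that repeats throughout this file:  build
+ * the module file name, open it with begin_ddf(), step through the subfields
+ * with get_subfield() until the desired tag/label combination appears, and
+ * then close the module with end_ddf().  The "IDEN"/"TITL" pair below is
+ * just one example; each block in parse_dem_sdts() follows this shape with
+ * its own module name and subfield labels.
+ *
+ *	struct subfield subfield;
+ *
+ *	if (begin_ddf(file_name) < 0)  {
+ *		return 1;	// couldn't open the module
+ *	}
+ *	while (get_subfield(&subfield) != 0)  {
+ *		if ((strcmp(subfield.tag, "IDEN") == 0) && (strcmp(subfield.label, "TITL") == 0))  {
+ *			// copy subfield.value (subfield.length bytes) into the output structure
+ *			break;
+ *		}
+ *	}
+ *	end_ddf();
+ */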
+
+
+/*
+ * Note to the reader of this code.  This code will probably be difficult
+ * to understand unless you are very familiar with the internals of SDTS files
+ * and `classic' DEM files.  Normally I would provide a lot of descriptive
+ * comments to help you along.  However, in this case, such comments would
+ * probably end up being several times the length of the code.  I wrote this
+ * program with two large documents available for reference.  If you want to
+ * follow the operation of the code, you will probably need those documents
+ * too.  The documents were:
+ *
+ * The Spatial Data Transfer Standard Mapping of the USGS Digital Elevation Model,
+ * 11/13/97 version 1, by Mid-Continent Mapping Center Branch of Research, Technology
+ * and Applications.
+ *
+ * Standards for Digital Elevation Models, US Department of the Interior,
+ * US Geological Survey, National Mapping Division, 8/97.
+ *
+ * There are comments at key points in the code, but they are not adequate
+ * for a full understanding unless you have the reference materials at hand.
+ *
+ * Even the documents aren't really enough.  It is also useful to have
+ * both sample SDTS files and sample `classic' DEM files for reference as well.
+ */
+
+
+
+/*
+ * This routine parses informational data from various SDTS files
+ * and inserts the converted data into the given storage structure.
+ * The storage structure is based on the type A header record from the old
+ * DEM format.  While this structure doesn't correpond to the layout
+ * of SDTS files, it is still a useful place to stuff the interesting
+ * data.  More importantly, it lets us parse both SDTS and non-SDTS files
+ * and still end up with the same data structure full of data.
+ *
+ * Here are the meanings of the various module names associated with DEM files:
+ *
+ * There is one module associated with Identification:
+ *   IDEN --- Identification
+ *
+ * Misc:
+ *   STAT --- Transfer Statistics
+ *   CATD --- Catalog/Directory
+ *   CATS --- Catalog/Spatial Domain
+ *   LDEF --- Layer Definition
+ *   RSDF --- Raster Definition
+ *
+ * There are five modules involved in data quality:
+ *   DQHL --- Data Quality/Lineage
+ *   DQPA --- Data Quality/Positional Accuracy
+ *   DQAA --- Data Quality/Attribute Accuracy
+ *   DQCG --- Data Quality/Completeness
+ *   DQLC --- Data Quality/Logical Consistency
+ *
+ * There are three data dictionary modules:
+ *   DDSH --- Data Dictionary/Schema
+ *   DDOM --- Data Dictionary/Domain
+ *   DDDF --- Data Dictionary/Definition
+ *
+ * There are three modules associated with spatial reference and domain:
+ *   XREF --- External Spatial Reference
+ *   IREF --- Internal Spatial Reference
+ *   SPDM --- Spatial Domain
+ *
+ * Files associated with data:
+ *   CELL --- Actual data
+ *
+ *
+ * Here are the particular items we are interested in, within this morass of files,
+ * given as module/field/subfield triples, along with the associated fields in the
+ * dem_a structure.  (Note that some fields, present in the old DEM files, are no
+ * longer present in SDTS.)
+ *	IDEN/IDEN/TITL	dem_a->title
+ *	DQPA/DQPA/COMT	dem_a->level_code		also in DQHL/DQHL/COMT
+ *	DQPA/DQPA/COMT	dem_a->accuracy
+ *	XREF/XREF/RSNM	dem_a->plane_ref
+ *	XREF/XREF/ZONE	dem_a->zone
+ *	XREF/XREF/HDAT	dem_a->horizontal_datum
+ *	NONE          	dem_a->plane_units		Old-format field not encoded in SDTS.  SDTS requires meters for UTM and decimal degrees for GEO
+ *	SPDM/DMSA/X   	dem_a->sw_x_gp			see also SPDM/SPDM/DTYP
+ *	SPDM/DMSA/Y   	dem_a->sw_y_gp
+ *	SPDM/DMSA/X   	dem_a->nw_x_gp
+ *	SPDM/DMSA/Y   	dem_a->nw_y_gp
+ *	SPDM/DMSA/X   	dem_a->ne_x_gp
+ *	SPDM/DMSA/Y   	dem_a->ne_y_gp
+ *	SPDM/DMSA/X   	dem_a->se_x_gp
+ *	SPDM/DMSA/Y   	dem_a->se_y_gp
+ *	DDOM/DDOM/DVAL	dem_a->void_fill
+ *	DDOM/DDOM/DVAL	dem_a->edge_fill
+ *	DDOM/DDOM/DVAL	dem_a->min_elev
+ *	DDOM/DDOM/DVAL	dem_a->max_elev
+ *	NONE          	dem_a->angle			Old-format field not encoded in SDTS.  Not in use.  Assumed to be always 0.
+ *	IREF/IREF/XHRS	dem_a->x_res
+ *	IREF/IREF/YHRS	dem_a->y_res
+ *	IREF/IREF/SFAX	x_scale_factor
+ *	IREF/IREF/SFAY	y_scale_factor
+ *	DDSH/DDSH/UNIT	dem_a->elev_units
+ *	DDSH/DDSH/PREC	dem_a->z_res
+ *	LDEF/LDEF/NROW	dem_a->rows			see also RSDF/RSDF/RWXT (row extent)
+ *	LDEF/LDEF/NCOL	dem_a->cols			see also RSDF/RSDF/CLXT (col extent)
+ *	RSDF/SADR/X   	dem_a->x_gp_first		First elevation in profile (profiles in SDTS run from W to E and are padded to full row length)
+ *	RSDF/SADR/Y   	dem_a->y_gp_first		First elevation in profile (profiles in SDTS run from W to E and are padded to full row length)
+ *	CELL/CVLS/ELEVATION				An array of elevations (the actual DEM data)
+ */
+int
+parse_dem_sdts(unsigned char *passed_file_name, struct dem_record_type_a *dem_a,
+		struct dem_record_type_c *dem_c, struct datum *dem_datum, long gz_flag)
+{
+	long i;
+	long layer;
+	long file_name_length;
+	char file_name[MAX_FILE_NAME];
+	long byte_order;
+	long upper_case_flag;
+	long need;
+	struct subfield subfield;
+	char save_byte;
+	long record_id;
+	double x_scale_factor, y_scale_factor;
+	double x_origin, y_origin;
+	char *ptr;
+
+
+	/*
+	 * Make a copy of the file name.  The one we were originally
+	 * given is still stored in the command line arguments.
+	 * It is probably a good idea not to alter those, lest we
+	 * scribble something we don't want to scribble.
+	 */
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME - 1);
+	file_name[MAX_FILE_NAME - 1] = '\0';
+	if ((file_name_length = strlen(file_name)) < 12)  {
+		/*
+		 * Excluding the initial path, the file name should have the form
+		 * ????CE??.DDF, perhaps with a ".gz" on the end.  If it isn't
+		 * at least long enough to have this form, then reject it.
+		 */
+		fprintf(stderr, "File name %s doesn't look right.\n", file_name);
+		return 1;
+	}
+	/* Check the case of the characters in the file name by examining a single character. */
+	if (gz_flag == 0)  {
+		if (file_name[file_name_length - 1] == 'f')  {
+			upper_case_flag = 0;
+		}
+		else  {
+			upper_case_flag = 1;
+		}
+	}
+	else  {
+		if (file_name[file_name_length - 4] == 'f')  {
+			upper_case_flag = 0;
+		}
+		else  {
+			upper_case_flag = 1;
+		}
+	}
+
+
+	/*
+	 * Parse all of the information that we care about.
+	 * For now, don't waste time parsing things that aren't
+	 * currently interesting.
+	 *
+	 * Even to get the few things we care about, we need to
+	 * examine several files.
+	 *
+	 * There are a lot of comments in dem.h describing the various
+	 * data items, so this block of code is presented largely
+	 * sans comments.
+	 *
+	 * Most of this routine is basically the same block of code,
+	 * over and over, as we read a succession of files to get
+	 * the data we need.
+	 */
+
+	/* Begin by finding the native byte-order on this machine. */
+	byte_order = swab_type();
+
+
+
+	/*
+	 * The first file name we need is the IDEN module, which contains
+	 * the DEM title.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "iden.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "iden.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "IDEN.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "IDEN.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 1;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "IDEN") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "TITL") == 0))  {
+				strncpy(dem_a->title, subfield.value, subfield.length);
+				dem_a->title[subfield.length] = '\0';
+
+				/* This is all we need.  Break out of the loop. */
+				need--;
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+
+
+
+	/*
+	 * The next file name we need is the DQPA module, which contains
+	 * some quality information, including the DEM level and some Root
+	 * Mean Square Error (RMSE) statistics.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "dqpa.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "dqpa.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "DQPA.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "DQPA.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 2;
+	record_id = -1;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "DQPA") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				record_id = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+			else if ((record_id == 1) && (strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "COMT") == 0))  {
+				dem_a->level_code = -1;
+				if (subfield.length > 10)  {
+					if (strncmp(subfield.value, "DEM LEVEL ", 10) == 0)  {
+						/*
+						 * Since, in SDTS, the DEM level is part of a line of commentary,
+						 * it is hard to be sure we have the correct value.  Give a
+						 * reasonable try at finding it, since it always appears to be
+						 * at the same place in the comment, but don't get too worked up
+						 * about it, since it isn't that crucial that we know it.
+						 */
+						dem_a->level_code = subfield.value[10] - '0';
+					}
+				}
+
+				need--;
+			}
+			else if ((record_id == 2) && (strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "COMT") == 0))  {
+				/*
+				 * The RMSE statistics are hard to dig out, because they are embedded
+				 * in a long text string that can come in several basic forms.
+				 * Thus, this block of code is complicated.
+				 */
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				if (strncmp(subfield.value, "ACCURACY: Unspecified", strlen("ACCURACY: Unspecified")) == 0)  {
+					dem_a->accuracy = 0;
+				}
+				else  {
+					dem_c->datum_sample_size = 0;
+					dem_c->datum_rmse_x = 0;
+					dem_c->datum_rmse_y = 0;
+					dem_c->datum_rmse_z = 0;
+					dem_c->datum_stats_flag = 0;
+					if ((ptr = strstr(subfield.value, "absolute datum (x")) != (char *)0)  {
+						i = ptr - subfield.value + strlen("absolute datum (x");
+						if ((ptr = strstr(&subfield.value[i], "(")) != (char *)0)  {
+							ptr++;
+							if (((*ptr >= '0') && (*ptr <= '9')) || (*ptr == ' '))  {
+								dem_c->datum_rmse_x = strtol(ptr, &ptr, 10);
+								if ((*ptr == ',') || (*ptr == ' '))  {
+									ptr++;
+									if (((*ptr >= '0') && (*ptr <= '9')) || (*ptr == ' '))  {
+										dem_c->datum_rmse_y = strtol(ptr, &ptr, 10);
+										if ((*ptr == ',') || (*ptr == ' '))  {
+											ptr++;
+											dem_c->datum_rmse_z = strtol(ptr, &ptr, 10);
+											if ((ptr = strstr(ptr, "); ")) != (char *)0)  {
+												ptr = ptr + 3;
+												if (strncmp(ptr, "accuracy is estimated", strlen("accuracy is estimated")) == 0)  {
+													dem_c->datum_stats_flag = 1;
+												}
+												else if (strncmp(ptr, "accuracy has been", strlen("accuracy has been")) == 0)  {
+													if ((ptr = strstr(ptr, "size of ")) != (char *)0)  {
+														ptr += strlen("size of ");
+														dem_c->datum_sample_size = strtol(ptr, (char **)0, 10);
+														dem_c->datum_stats_flag = 1;
+													}
+												}
+											}
+										}
+									}
+								}
+							}
+						}
+					}
+
+					dem_c->dem_sample_size = 0;
+					dem_c->dem_rmse_x = 0;
+					dem_c->dem_rmse_y = 0;
+					dem_c->dem_rmse_z = 0;
+					dem_c->dem_stats_flag = 0;
+					if ((ptr = strstr(subfield.value, "file's datum (x")) != (char *)0)  {
+						i = ptr - subfield.value + strlen("file's datum (x");
+						if ((ptr = strstr(&subfield.value[i], "(")) != (char *)0)  {
+							ptr++;
+							if (((*ptr >= '0') && (*ptr <= '9')) || (*ptr == ' '))  {
+								dem_c->dem_rmse_x = strtol(ptr, &ptr, 10);
+								if ((*ptr == ',') || (*ptr == ' '))  {
+									ptr++;
+									if (((*ptr >= '0') && (*ptr <= '9')) || (*ptr == ' '))  {
+										dem_c->dem_rmse_y = strtol(ptr, &ptr, 10);
+										if ((*ptr == ',') || (*ptr == ' '))  {
+											ptr++;
+											dem_c->dem_rmse_z = strtol(ptr, &ptr, 10);
+											if ((ptr = strstr(ptr, "); ")) != (char *)0)  {
+												ptr = ptr + 3;
+												if (strncmp(ptr, "accuracy is estimated", strlen("accuracy is estimated")) == 0)  {
+													dem_c->dem_stats_flag = 1;
+												}
+												else if (strncmp(ptr, "accuracy has been", strlen("accuracy has been")) == 0)  {
+													if ((ptr = strstr(ptr, "size of ")) != (char *)0)  {
+														ptr += strlen("size of ");
+														dem_c->dem_sample_size = strtol(ptr, (char **)0, 10);
+														dem_c->dem_stats_flag = 1;
+													}
+												}
+											}
+										}
+									}
+								}
+							}
+						}
+					}
+
+					dem_a->accuracy = 1;
+				}
+				subfield.value[subfield.length] = save_byte;
+
+				need--;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * The next file name we need is the XREF module, which contains
+	 * information relating to the planimetric reference system, zone, and datum.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "xref.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "xref.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "XREF.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "XREF.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	dem_a->vertical_datum_shift = 0.0;	// Set this one in case we don't find it.
+	need = 5;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "XREF") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "RSNM") == 0))  {
+				/*
+				 * Valid choices are "UTM", "GEO", or "SPCS"
+				 */
+				if (strncmp(subfield.value, "GEO", subfield.length) == 0)  {
+					dem_a->plane_ref = 0;
+					dem_a->plane_units = 3;				// For GEO, it is decimal degrees.  Cheat a bit, by putting in the old code for arc-seconds.
+				}
+				else if (strncmp(subfield.value, "UTM", subfield.length) == 0)  {
+					dem_a->plane_ref = 1;
+					dem_a->plane_units = 2;
+				}
+				else if (strncmp(subfield.value, "SPCS", subfield.length) == 0)  {
+					dem_a->plane_ref = 2;
+					dem_a->plane_units = 2;
+				}
+				else  {
+					dem_a->plane_ref = -1;
+					dem_a->plane_units = -1;
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "ZONE") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				dem_a->zone = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "HDAT") == 0))  {
+				/*
+				 * Valid choices are "NAS" for NAD-27, "WGC" for WGS-72, "WGE" for WGS-84, "NAX" for NAD-83,
+				 * "OHD" for old Hawaii, and "PRD" for Puerto Rico.
+				 */
+				if (strncmp(subfield.value, "NAS", subfield.length) == 0)  {
+					dem_a->horizontal_datum = 1;
+				}
+				else if (strncmp(subfield.value, "WGC", subfield.length) == 0)  {
+					dem_a->horizontal_datum = 2;
+				}
+				else if (strncmp(subfield.value, "WGE", subfield.length) == 0)  {
+					dem_a->horizontal_datum = 3;
+				}
+				else if (strncmp(subfield.value, "NAX", subfield.length) == 0)  {
+					dem_a->horizontal_datum = 4;
+				}
+				else if (strncmp(subfield.value, "OHD", subfield.length) == 0)  {
+					dem_a->horizontal_datum = 5;
+				}
+				else if (strncmp(subfield.value, "PRD", subfield.length) == 0)  {
+					dem_a->horizontal_datum = 6;
+				}
+				else  {
+					dem_a->horizontal_datum = -1;
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "COMT") == 0))  {
+				if ((subfield.length > 0) && ((subfield.value[0] == 'V') || (subfield.value[0] == 'v')))  {
+					for (i = 20; (i < 30) && (i < subfield.length); i++)  {
+						if ((subfield.value[i] >= '0') && (subfield.value[i] <= '9'))  {
+							dem_a->vertical_datum_shift = strtod(&subfield.value[i], (char **)0);
+							break;
+						}
+					}
+				}
+				need--;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+		if (strcmp(subfield.tag, "VATT") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "VDAT") == 0))  {
+				/*
+				 * Valid choices are "LMSL" for local mean sea level, "NGVD" for the
+				 * National Geodetic Vertical Datum of 1929, and "NAVD" for the
+				 * North American Vertical Datum of 1988.
+				 */
+				if (strncmp(subfield.value, "LMSL", subfield.length) == 0)  {
+					dem_a->vertical_datum = 1;
+				}
+				else if (strncmp(subfield.value, "NGVD", subfield.length) == 0)  {
+					dem_a->vertical_datum = 2;
+				}
+				else if (strncmp(subfield.value, "NAVD", subfield.length) == 0)  {
+					dem_a->vertical_datum = 3;
+				}
+				else  {
+					dem_a->vertical_datum = -1;
+				}
+				need--;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * The next file name we need is the SPDM module, which contains
+	 * the UTM coordinates of the corners.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "spdm.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "spdm.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "SPDM.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "SPDM.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * We also need the layer number from the file name.
+	 * Some SDTS transfers may contain multiple CE files,
+	 * and we need to pull the correct record out of the AHDR file.
+	 *
+	 * Actually, I haven't come across any SDTS DEMs yet that
+	 * have more than the ????CEL0.DDF file.  However, we include
+	 * all of this layer stuff on the premise that it should work
+	 * for existing files, and should also work if we come across
+	 * data with multiple layers.
+	 */
+	if (gz_flag != 0)  {
+	 	layer = strtol(&passed_file_name[file_name_length - 8], (char **)0, 10) + 1;
+	}
+	else  {
+		layer = strtol(&passed_file_name[file_name_length - 5], (char **)0, 10) + 1;
+	}
+	if (layer <= 0)  {
+		fprintf(stderr, "Got bad layer number (%ld) from file %s.\n", layer, passed_file_name);
+		return 1;
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 8;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "SPDM") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strncmp(subfield.label, "RCID", 4) == 0))  {
+				/*
+				 * Check for the correct layer.
+				 * set layer = -1 as a flag if you find it.
+				 */
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				if (layer == strtol(subfield.value, (char **)0, 10))  {
+					layer = -1;
+				}
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if ((layer < 0) && (strcmp(subfield.tag, "DMSA") == 0))  {
+			if ((strstr(subfield.format, "R") != (char *)0) && ((strcmp(subfield.label, "X") == 0) || (strcmp(subfield.label, "Y") == 0)))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				switch (need)  {
+				case 8:
+					dem_a->sw_x_gp = strtod(subfield.value, (char **)0);
+					break;
+				case 7:
+					dem_a->sw_y_gp = strtod(subfield.value, (char **)0);
+					break;
+				case 6:
+					dem_a->nw_x_gp = strtod(subfield.value, (char **)0);
+					break;
+				case 5:
+					dem_a->nw_y_gp = strtod(subfield.value, (char **)0);
+					break;
+				case 4:
+					dem_a->ne_x_gp = strtod(subfield.value, (char **)0);
+					break;
+				case 3:
+					dem_a->ne_y_gp = strtod(subfield.value, (char **)0);
+					break;
+				case 2:
+					dem_a->se_x_gp = strtod(subfield.value, (char **)0);
+					break;
+				case 1:
+					dem_a->se_y_gp = strtod(subfield.value, (char **)0);
+					break;
+				}
+				need--;
+				subfield.value[subfield.length] = save_byte;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * The next file name we need is the DDOM module, which contains
+	 * the elevation values for non-valid map areas, and
+	 * the minimum and maximum elevations.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "ddom.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "ddom.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "DDOM.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "DDOM.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 4;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "DDOM") == 0)  {
+			/*
+			 * Some DEM files use "R" for these values instead of "I".
+			 * Thus, we must check for both.
+			 */
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "DVAL") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				switch (need)  {
+				case 4:
+					dem_a->void_fill = strtol(subfield.value, (char **)0, 10);
+					break;
+				case 3:
+					dem_a->edge_fill = strtol(subfield.value, (char **)0, 10);
+					break;
+				case 2:
+					dem_a->min_elev = strtol(subfield.value, (char **)0, 10);
+					break;
+				case 1:
+					dem_a->max_elev = strtol(subfield.value, (char **)0, 10);
+					break;
+				}
+				need--;
+				subfield.value[subfield.length] = save_byte;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "DVAL") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				switch (need)  {
+				case 4:
+					dem_a->void_fill = round(strtod(subfield.value, (char **)0));
+					break;
+				case 3:
+					dem_a->edge_fill = round(strtod(subfield.value, (char **)0));
+					break;
+				case 2:
+					dem_a->min_elev = round(strtod(subfield.value, (char **)0));
+					break;
+				case 1:
+					dem_a->max_elev = round(strtod(subfield.value, (char **)0));
+					break;
+				}
+				need--;
+				subfield.value[subfield.length] = save_byte;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * The next file name we need is the IREF module, which contains
+	 * the horizontal x and y resolution, and the x and y scale factors
+	 * for UTM coordinates.  (Some UTM coordinates are stored as binary integer
+	 * values, and then multiplied by the scale factors after conversion
+	 * to floating point.  This is true for the location of the first point
+	 * in the first profile, stored in RSDF/SADR/X, RSDF/SADR/Y.  However,
+	 * the coordinates of the four corners of the map are stored as real
+	 * numbers in SPDM/DMSA/X, SPDM/DMSA/Y.)
+	 */
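+	/*
+	 * For example (hypothetical values): if SFAX and SFAY were both 1.0 and
+	 * XORG/YORG both 0.0, the binary SADR/X and SADR/Y values read later would
+	 * already be UTM meters; a nonzero origin or scale simply scales and
+	 * shifts them before use.
+	 */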
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "iref.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "iref.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "IREF.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "IREF.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 6;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "IREF") == 0)  {
+			save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+			if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "XHRS") == 0))  {
+				dem_a->x_res = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "YHRS") == 0))  {
+				dem_a->y_res = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "SFAX") == 0))  {
+				x_scale_factor = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "SFAY") == 0))  {
+				y_scale_factor = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "XORG") == 0))  {
+				x_origin = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "YORG") == 0))  {
+				y_origin = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			subfield.value[subfield.length] = save_byte;
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * The next file name we need is the DDSH module, which contains
+	 * the elevation units and the vertical resolution.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "ddsh.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "ddsh.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "DDSH.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "DDSH.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 2;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "DDSH") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "UNIT") == 0))  {
+				if (strncmp(subfield.value, "FEET", subfield.length) == 0)  {
+					dem_a->elev_units = 1;
+				}
+				else if (strncmp(subfield.value, "METERS", subfield.length) == 0)  {
+					dem_a->elev_units = 2;
+				}
+				else  {
+					dem_a->elev_units = -1;
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "PREC") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				dem_a->z_res = strtod(subfield.value, (char **)0);
+				subfield.value[subfield.length] = save_byte;
+				need--;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * The next file name we need is the LDEF module, which contains
+	 * the numbers of rows and columns.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "ldef.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "ldef.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "LDEF.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "LDEF.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 2;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "LDEF") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "NROW") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				dem_a->rows = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+				need--;
+			}
+			else if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "NCOL") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				dem_a->cols = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+				need--;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * The next file name we need is the RSDF module, which contains
+	 * the (x_gp, y_gp) coordinates of the first profile in the DEM,
+	 * and the elevation pattern parameter.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "rsdf.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "rsdf.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "RSDF.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "RSDF.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 3;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "SADR") == 0)  {
+			/*
+			 * These two entries are special because they contain UTM coordinates which, unlike
+			 * other numeric values, are stored in two's-complement binary format, rather than the real
+			 * 'R' format that we might otherwise expect.  This type of storage is okay, for this
+			 * 'R' format that we might otherwise expect.  This type of storage is okay for this
+			 * application, because the DEM coordinates are always round multiples of 10 or 30, and
+			 * if necessary, during the conversion to internal format.
+			 */
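+			/*
+			 * Worked example (hypothetical bytes): with subfield.value[0..3] =
+			 * { 0x54, 0x71, 0x06, 0x00 }, the shifts below assemble
+			 * i = 0x00067154 = 422228 before any *_SWAB() byte-order fixup.
+			 */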
+			if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "X") == 0))  {
+				if (subfield.length != 4)  {
+					/* Error */
+					dem_a->x_gp_first = -1.0;
+				}
+				else  {
+					i = (((long)subfield.value[3] & 0xff) << 24) |
+					    (((long)subfield.value[2] & 0xff) << 16) |
+					    (((long)subfield.value[1] & 0xff) <<  8) |
+					     ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						dem_a->x_gp_first = (double)i;
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&i);
+						dem_a->x_gp_first = (double)i;
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&i);
+						dem_a->x_gp_first = (double)i;
+					}
+				}
+				/*
+				 * Multiply the UTM coordinates by the scale factors,
+				 * and add in the x and y origins.
+				 * We don't need to do this for the UTM coordinates of the
+				 * map corners because they are stored in real-number format
+				 * rather than binary format.
+				 *
+				 * For some files, x_gp_first and y_gp_first are stored in real-number
+				 * format also.  We handle those files below.
+				 */
+				dem_a->x_gp_first = x_scale_factor * dem_a->x_gp_first + x_origin;
+				/*
+				 * The USGS apparently didn't put in the location of the first
+				 * elevation in the first profile, but rather just put in the location
+				 * of the top left corner of the map area.  Thus, we need to round
+				 * these two values to round multiples of dem_a->x_res and dem_a->y_res.
+				 * The y value must be rounded down and the x value must be rounded up,
+				 * so that the point will be inside the northwest corner of the map area.
+				 */
+				dem_a->x_gp_first = ceil(dem_a->x_gp_first / dem_a->x_res) * dem_a->x_res;
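+				/*
+				 * Worked example (hypothetical values): with x_gp_first =
+				 * 422218.03 and x_res = 30.0, ceil(422218.03 / 30.0) * 30.0 =
+				 * 14074.0 * 30.0 = 422220.0, the first 30-meter column at or
+				 * east of the corner; the Y case below uses floor() instead.
+				 */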
+				need--;
+			}
+			else if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "Y") == 0))  {
+				if (subfield.length != 4)  {
+					/* Error */
+					dem_a->y_gp_first = -1.0;
+				}
+				else  {
+					i = (((long)subfield.value[3] & 0xff) << 24) |
+					    (((long)subfield.value[2] & 0xff) << 16) |
+					    (((long)subfield.value[1] & 0xff) <<  8) |
+					     ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						dem_a->y_gp_first = (double)i;
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&i);
+						dem_a->y_gp_first = (double)i;
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&i);
+						dem_a->y_gp_first = (double)i;
+					}
+				}
+				/*
+				 * Multiply the UTM coordinates by the scale factors,
+				 * and add in the x and y origins.
+				 * We don't need to do this for the UTM coordinates of the
+				 * map corners because they are stored in real-number format
+				 * rather than binary format.
+				 *
+				 * For some files, x_gp_first and y_gp_first are stored in real-number
+				 * format also.  We handle those files below.
+				 */
+				dem_a->y_gp_first = y_scale_factor * dem_a->y_gp_first + y_origin;
+				/*
+				 * The USGS apparently didn't put in the location of the first
+				 * elevation in the first profile, but rather just put in the location
+				 * of the top left corner of the map area.  Thus, we need to round
+				 * these two values to round multiples of dem_a->x_res and dem_a->y_res.
+				 * The y value must be rounded down and the x value must be rounded up,
+				 * so that the point will be inside the northwest corner of the map area.
+				 */
+				dem_a->y_gp_first = floor(dem_a->y_gp_first / dem_a->y_res) * dem_a->y_res;
+				need--;
+			}
+			/*
+			 * In my experience, most files store X and Y as binary values.
+			 * However, a few files use the "R" format, so we also check for it
+			 * here.
+			 */
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "X") == 0))  {
+				dem_a->x_gp_first = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "Y") == 0))  {
+				dem_a->y_gp_first = strtod(subfield.value, (char **)0);
+				need--;
+			}
+		}
+		else if (strcmp(subfield.tag, "RSDF") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "OBRP") == 0))  {
+				if ((subfield.length == 2) && (subfield.value[0] == 'G') && (subfield.value[1] == '2'))  {
+					dem_a->elevation_pattern = 1;	// regular
+				}
+				else if ((subfield.length == 0) || (subfield.value[0] == ' '))  {
+					dem_a->elevation_pattern = 2;	// random
+				}
+				need--;
+			}
+		}
+		if (need == 0)  {
+			/* This is all we need.  Break out of the loop. */
+			break;
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+	if ((dem_a->horizontal_datum == -1) || (dem_a->horizontal_datum == 1))  {
+		/* The datum is NAD-27.  Initialize the parameters. */
+		dem_datum->a = NAD27_SEMIMAJOR;
+		dem_datum->b = NAD27_SEMIMINOR;
+		dem_datum->e_2 = NAD27_E_SQUARED;
+		dem_datum->f_inv = NAD27_F_INV;
+		dem_datum->k0 = UTM_K0;
+		dem_datum->a0 = NAD27_A0;
+		dem_datum->a2 = NAD27_A2;
+		dem_datum->a4 = NAD27_A4;
+		dem_datum->a6 = NAD27_A6;
+	}
+	else if (dem_a->horizontal_datum == 3)  {
+		/* The datum is WGS-84.  Initialize the parameters. */
+		dem_datum->a = WGS84_SEMIMAJOR;
+		dem_datum->b = WGS84_SEMIMINOR;
+		dem_datum->e_2 = WGS84_E_SQUARED;
+		dem_datum->f_inv = WGS84_F_INV;
+		dem_datum->k0 = UTM_K0;
+		dem_datum->a0 = WGS84_A0;
+		dem_datum->a2 = WGS84_A2;
+		dem_datum->a4 = WGS84_A4;
+		dem_datum->a6 = WGS84_A6;
+	}
+	else if (dem_a->horizontal_datum == 4)  {
+		/* The datum is NAD-83.  Initialize the parameters. */
+		dem_datum->a = NAD83_SEMIMAJOR;
+		dem_datum->b = NAD83_SEMIMINOR;
+		dem_datum->e_2 = NAD83_E_SQUARED;
+		dem_datum->f_inv = NAD83_F_INV;
+		dem_datum->k0 = UTM_K0;
+		dem_datum->a0 = NAD83_A0;
+		dem_datum->a2 = NAD83_A2;
+		dem_datum->a4 = NAD83_A4;
+		dem_datum->a6 = NAD83_A6;
+	}
+	else  {
+		/* We don't handle any other datums yet.  Default to NAD-27. */
+		dem_datum->a = NAD27_SEMIMAJOR;
+		dem_datum->b = NAD27_SEMIMINOR;
+		dem_datum->e_2 = NAD27_E_SQUARED;
+		dem_datum->f_inv = NAD27_F_INV;
+		dem_datum->k0 = UTM_K0;
+		dem_datum->a0 = NAD27_A0;
+		dem_datum->a2 = NAD27_A2;
+		dem_datum->a4 = NAD27_A4;
+		dem_datum->a6 = NAD27_A6;
+
+		fprintf(stderr, "Warning:  The DEM data aren't in a horizontal datum I currently handle.\n");
+		fprintf(stderr, "Defaulting to NAD-27.  This may result in positional errors in the data.\n");
+	}
+
+	/*
+	 * Convert the southeast UTM corner to latitude/longitude
+	 */
+	if (redfearn_inverse(dem_datum, dem_a->se_x_gp, dem_a->se_y_gp, dem_a->zone, &(dem_a->se_lat), &(dem_a->se_long)) != 0)  {
+		fprintf(stderr, "redfearn_inverse() returns failure (utm_x = %g, utm_y = %g, utm_zone = %d)\n",
+			dem_a->se_x_gp, dem_a->se_y_gp, dem_a->zone);
+		exit(0);
+	}
+
+
+
+	/*
+	 * The next file name we need is the DQHL module, which contains
+	 * some quality information, including the source of the data.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "dqhl.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "dqhl.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "DQHL.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "DQHL.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 2;
+	record_id = -1;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "DQHL") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				record_id = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+			else if ((record_id == 3) && (strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "COMT") == 0))  {
+				dem_a->origin_code[0] = ' '; dem_a->origin_code[1] = ' '; dem_a->origin_code[2] = ' '; dem_a->origin_code[3] = ' ';
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				if (strncmp(subfield.value, "DEM PRODUCER:  Unspecified", 26) == 0)  {
+					dem_a->origin_code[1] = ' '; dem_a->origin_code[2] = ' '; dem_a->origin_code[3] = ' ';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  National M", 25) == 0)  {
+					dem_a->origin_code[1] = 'N'; dem_a->origin_code[2] = 'M'; dem_a->origin_code[3] = 'D';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  Eastern Ma", 25) == 0)  {
+					dem_a->origin_code[1] = 'E'; dem_a->origin_code[2] = 'M'; dem_a->origin_code[3] = 'C';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  Western Ma", 25) == 0)  {
+					dem_a->origin_code[1] = 'W'; dem_a->origin_code[2] = 'M'; dem_a->origin_code[3] = 'C';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  Mid-Contin", 25) == 0)  {
+					dem_a->origin_code[0] = 'M'; dem_a->origin_code[1] = 'C'; dem_a->origin_code[2] = 'M'; dem_a->origin_code[3] = 'C';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  Rocky Moun", 25) == 0)  {
+					dem_a->origin_code[0] = 'R'; dem_a->origin_code[1] = 'M'; dem_a->origin_code[2] = 'M'; dem_a->origin_code[3] = 'C';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  Forest Ser", 25) == 0)  {
+					dem_a->origin_code[1] = ' '; dem_a->origin_code[2] = 'F'; dem_a->origin_code[3] = 'S';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  Gestalt Ph", 25) == 0)  {
+					dem_a->origin_code[0] = 'G'; dem_a->origin_code[1] = 'P'; dem_a->origin_code[2] = 'M'; dem_a->origin_code[3] = '2';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  Contractor", 25) == 0)  {
+					dem_a->origin_code[0] = 'C'; dem_a->origin_code[1] = 'O'; dem_a->origin_code[2] = 'N'; dem_a->origin_code[3] = 'T';
+				}
+				else if (strncmp(subfield.value, "DEM PRODUCER:  ", 15) == 0)  {
+					if (subfield.length > 15)  {
+						strncpy(&(dem_a->origin_code[4 - (subfield.length >= 19 ? 4 : subfield.length - 15)]),
+							&subfield.value[15],
+							subfield.length >= 19 ? 4 : subfield.length - 15);
+					}
+				}
+				subfield.value[subfield.length] = save_byte;
+				need--;
+			}
+			else if ((record_id == 2) && (strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "COMT") == 0))  {
+				if (subfield.length >= 14)  {
+					dem_a->process_code = subfield.value[13] - '0';
+				}
+				need--;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+	return 0;
+}
+
+
+
+
+/*
+ * In SDTS, all DEMs are stored as a rectangular grid.
+ * Thus, we can use the same routine to process all DEMs,
+ * whether they are in the UTM Planimetric Reference System,
+ * or the Geographic Planimetric Reference System (latitude/longitude).
+ * (Currently, we have no samples of geographic-style data.)
+ *
+ * This function returns 0 if it allocates memory and reads in the data.
+ * It returns 1 if it doesn't allocate memory.
+ */
+int
+process_dem_sdts(unsigned char *file_name, struct image_corners *image_corners,
+		struct dem_corners *dem_corners, struct dem_record_type_a *dem_a, struct datum *dem_datum)
+{
+	long i, j, k;
+	union {
+		unsigned long i;
+		float f;
+	} conv;
+	double f, g;
+	long x, y;
+	short s1, s2;
+	short *sptr, *sptr2;
+	short *sptr_up;
+	short *sptr_down;
+	short *tmp_row;
+	ssize_t ret_val;
+	long profile_rows, profile_columns;
+	long dem_size_x, dem_size_y;
+	double lat_min, long_min;
+	double lat_max, long_max;
+	double x_gp_min, y_gp_min;
+	double x_gp_max, y_gp_max;
+	long lat_min_loc_i, lat_min_loc_j;
+	long lat_max_loc_i, lat_max_loc_j;
+	long long_min_loc_i, long_min_loc_j;
+	long long_max_loc_i, long_max_loc_j;
+	int x_gp_min_loc_i, x_gp_min_loc_j;
+	int x_gp_max_loc_i, x_gp_max_loc_j;
+	int y_gp_min_loc_i, y_gp_min_loc_j;
+	int y_gp_max_loc_i, y_gp_max_loc_j;
+	double x_gp, y_gp;
+	double m, b;
+	double latitude, longitude;
+	long elevation;
+	long longest_profile = -1;
+	long easternmost_full_profile;
+	unsigned char save_byte;
+	FILE *file_stream;
+	long byte_order;
+	long upper_case_flag;
+	struct subfield subfield;
+	long get_ret;
+
+
+	/* Begin by finding the native byte-order on this machine. */
+	byte_order = swab_type();
+
+
+	/*
+	 * Make sure that the UTM zone information isn't bogus.
+	 */
+	if ((dem_a->zone < 1) || (dem_a->zone > 60))  {
+		fprintf(stderr, "DEM file contains a bad UTM zone (%d).  File ignored.\n", dem_a->zone);
+		return 1;
+	}
+
+
+	/*
+	 * We need to find the location of the first elevation sample in the first
+	 * profile.  This procedure is laid out in detail in the DEM standards documents,
+	 * complete with nice pictures of the geometry, so I won't describe all of the
+	 * details here.  Basically, though, the samples are at UTM coordinates that are
+	 * evenly divisible by the 30-meter sample spacing (or divisible by 10 meters if
+	 * the sample spacing is 10 meters).  We need to find the first set of coordinates
+	 * that have round-numbered values just inside the SW corner.  The procedure varies
+	 * depending on whether the data block is west or east of the central meridian.
+	 *
+	 * Actually, we don't need to do this the hard way, since each profile header
+	 * contains the starting UTM coordinates of the profile.  However, the method is
+	 * worth encapulating here in case we need to do something like it later.  The
+	 * worth encapsulating here in case we need to do something like it later.  The
+	 */
+//	if ((0.5 * (dem_a->sw_x + dem_a->se_x)) < 500000.0)  {
+//		/* West of central meridian. */
+//		sw_x = dem_a->x_res * ceil(dem_a->sw_x / dem_a->x_res);
+//		m = (dem_a->se_y - dem_a->sw_y) / (dem_a->se_x - dem_a->sw_x);
+//		b = dem_a->sw_y - m * dem_a->sw_x;
+//		sw_y = dem_a->y_res * ceil((b + m * sw_x) / dem_a->y_res);
+//	}
+//	else  {
+//		/* East of central meridian. */
+//		sw_x = dem_a->x_res * ceil(dem_a->nw_x / dem_a->x_res);
+//		m = (dem_a->nw_y - dem_a->sw_y) / (dem_a->nw_x - dem_a->sw_x);
+//		b = dem_a->sw_y - m * dem_a->sw_x;
+//		sw_y = dem_a->y_res * ceil((b + m * sw_x) / dem_a->y_res);
+//	}
+
+
+	/*
+	 * Convert UTM coordinates of corners into latitude/longitude pairs.
+	 */
+	(void)redfearn_inverse(dem_datum, dem_a->sw_x_gp, dem_a->sw_y_gp, dem_a->zone, &(dem_corners->sw_lat), &(dem_corners->sw_long));
+	(void)redfearn_inverse(dem_datum, dem_a->nw_x_gp, dem_a->nw_y_gp, dem_a->zone, &(dem_corners->nw_lat), &(dem_corners->nw_long));
+	(void)redfearn_inverse(dem_datum, dem_a->ne_x_gp, dem_a->ne_y_gp, dem_a->zone, &(dem_corners->ne_lat), &(dem_corners->ne_long));
+	(void)redfearn_inverse(dem_datum, dem_a->se_x_gp, dem_a->se_y_gp, dem_a->zone, &(dem_corners->se_lat), &(dem_corners->se_long));
+	dem_corners->sw_x_gp = dem_a->sw_x_gp; dem_corners->sw_y_gp = dem_a->sw_y_gp;
+	dem_corners->nw_x_gp = dem_a->nw_x_gp; dem_corners->nw_y_gp = dem_a->nw_y_gp;
+	dem_corners->ne_x_gp = dem_a->ne_x_gp; dem_corners->ne_y_gp = dem_a->ne_y_gp;
+	dem_corners->se_x_gp = dem_a->se_x_gp; dem_corners->se_y_gp = dem_a->se_y_gp;
+
+	/*
+	 * If the DEM data don't overlap the image, then ignore them.
+	 *
+	 * If the user didn't specify latitude/longitude ranges for the image,
+	 * then we simply use this DEM to determine those boundaries.  In this
+	 * latter case, no overlap check is necessary (or possible) since the
+	 * image boundaries will be determined later.
+	 *
+	 * Actually, no overlap check is needed, anyway, since the main routine
+	 * will ignore data that is out of bounds.  But we can save a whole
+	 * lot of processing if we can detect out-of-bounds data here.
+	 */
+	if (image_corners->sw_lat < image_corners->ne_lat)  {
+		/* The user has specified image boundaries.  Check for overlap. */
+		if ((dem_corners->sw_lat >= image_corners->ne_lat) || ((dem_corners->ne_lat) <= image_corners->sw_lat) ||
+		    (dem_corners->sw_long >= image_corners->ne_long) || ((dem_corners->ne_long) <= image_corners->sw_long))  {
+			return 1;
+		}
+	}
+
+	dem_size_x = dem_a->cols;
+	dem_size_y = dem_a->rows;
+
+	/*
+	 * Since SDTS DEMs are padded to make them rectangular, we don't have to worry about partial
+	 * profiles, like we do with old-format DEMs.  Thus, we just read in the data, convert it into
+	 * internal form, and store it in the array.
+	 * Begin by allocating the memory array.
+	 */
+	dem_corners->ptr = (short *)malloc(sizeof(short) * dem_size_x * dem_size_y);
+	if (dem_corners->ptr == (short *)0)  {
+		fprintf(stderr, "malloc of dem_corners->ptr failed\n");
+		exit(0);
+	}
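+	/*
+	 * The array is addressed row-major below: the elevation for row j,
+	 * column i lives at dem_corners->ptr + j * dem_size_x + i, so, for
+	 * example, a 400-column DEM stores the first sample of row 1 at
+	 * offset 400.
+	 */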
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	for (j = 0; j < dem_size_y; j++)  {
+		while ((get_ret = get_subfield(&subfield)) != 0)  {
+			/*
+			 * Skip unwanted subfields at the beginning of the record.
+			 */
+			if (strcmp(subfield.tag, "CVLS") == 0)  {
+				break;
+			}
+		}
+		if (get_ret == 0)  {
+			/* At end of file and we still haven't found what we need. */
+			fprintf(stderr, "Ran out of data in file %s.  Ignoring file.\n", file_name);
+			end_ddf();
+			return 1;
+		}
+		for (i = 0; i < dem_size_x; i++)  {
+			if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "ELEVATION") == 0))  {
+				sptr = (dem_corners->ptr + j * dem_size_x + i);
+				/*
+				 * These values, rather than being stored in 'I' format (integer numbers),
+				 * are stored in two's-complement binary.  Thus, they must be properly swabbed
+				 * during conversion to internal form.
+				 */
+				if (subfield.length == 2)  {
+					if (byte_order == 0)  {
+						*sptr = (((long)subfield.value[1] << 8) & 0x0000ff00) + ((long)subfield.value[0] & 0x000000ff);
+					}
+					else  {
+						*sptr = (((long)subfield.value[0] << 8) & 0x0000ff00) + ((long)subfield.value[1] & 0x000000ff);
+					}
+				}
+				else if (subfield.length == 4)  {
+					/*
+					 * Note:  When the length is 4, we assume that this is a
+					 * BFP32 value, which means that it is a raw binary IEEE 754
+					 * floating point number.  Thus, this conversion won't work
+					 * on machines where IEEE 754 is not the native floating point
+					 * format.  We could convert from binary into the native floating
+					 * point format the hard way, but it appears that most machines
+					 * support IEEE 754, so we will try it this way for a while.
+					 */
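+					/*
+					 * For example (hypothetical value): if the four bytes
+					 * assemble to conv.i = 0x447A0000, reading conv.f back
+					 * through the union reinterprets those bits as the
+					 * IEEE 754 single-precision value 1000.0.
+					 */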
+					conv.i = (((long)subfield.value[3] & 0xff) << 24) |
+						  (((long)subfield.value[2] & 0xff) << 16) |
+						  (((long)subfield.value[1] & 0xff) <<  8) |
+						   ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						/* Do nothing. */
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&conv.i);
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&conv.i);
+					}
+					*sptr = round(conv.f);
+				}
+				else  {
+					/* Error */
+					*sptr = HIGHEST_ELEVATION;
+				}
+				if (*sptr == dem_a->edge_fill)  {
+					/* This is a point, along the edges of the quad, that doesn't contain valid data. */
+					*sptr = HIGHEST_ELEVATION;
+				}
+				else if (*sptr == 32767)  {
+					/*
+					 * Some DEM files appear to mark invalid data points with 32767.
+					 * I can think of two possible reasons for this; but these are just
+					 * guesses, and the real reason may be entirely different.
+					 * First guess:  it may have been a human data entry error, since the
+					 * edge_fill value is normally -32767.  Second guess:  for a while, it
+					 * edge_fill value is normally -32767.  Second guess:  for a while, the
+					 * standard may have been to use 32767 for an edge_fill marker in
+					 * over as part of the automated conversion process.  I don't know
+					 * how these values got there, but they are clearly not valid elevations.
+					 *
+					 * One concern from all of this is that some of the original DEM files
+					 * appear to contain either 32767 or -32767 as non-valid data
+					 * markers.  (Perhaps sometimes both, although I haven't located such a
+					 * file yet.)  They may have been automatically carried over into the
+					 * SDTS files during automated conversion.  For the 32767 value, this
+					 * wouldn't appear to be a big problem, because we can still detect it.
+					 * However, the -32767 value is identical to the normal SDTS void_fill
+					 * value.  Thus, unless -32767 meant void_fill in the original DEM files,
+					 * this value may be misinterpreted after the conversion to SDTS.
+					 *
+					 * We treat 32767 in the same way as the edge_fill marker, and convert
+					 * it to HIGHEST_ELEVATION.
+					 */
+					*sptr = HIGHEST_ELEVATION;
+				}
+				else if (*sptr == dem_a->void_fill)  {
+					/* This is a point, somewhere within the quad, that falls within a void in the data. */
+					*sptr = 0;
+				}
+				else if (dem_a->elev_units == 1)  {
+					/*
+					 * The main body of drawmap likes to work in meters.
+					 * We satisfy that desire by changing feet into meters
+					 * before passing the data back.
+					 *
+					 * We alter the header information below, after all data
+					 * points have been processed.
+					 */
+					*sptr = (short)round((double)*sptr * 0.3048);
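+					/*
+					 * For example, an elevation of 5280 feet becomes
+					 * round(5280 * 0.3048) = round(1609.344) = 1609 meters.
+					 */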
+				}
+
+				if (i == (dem_size_x - 1))  {
+					break;
+				}
+			}
+
+			if (get_subfield(&subfield) == 0)  {
+				fprintf(stderr, "Shortage of data in %s.  Ignoring file.\n", file_name);
+				end_ddf();
+				return 1;
+			}
+			if (strcmp(subfield.tag, "CVLS") != 0)  {
+				/* There weren't the expected number of elevations in the row. */
+				fprintf(stderr, "Shortage of data in %s.  Ignoring file.\n", file_name);
+				end_ddf();
+				return 1;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+
+
+	/*
+	 * The main body of drawmap likes to work in meters.
+	 * We satisfy that desire by changing feet into meters
+	 * before passing the data back.
+	 *
+	 * Here we change the header information.  We already changed the
+	 * actual elevation data above.
+	 */
+	if (dem_a->elev_units == 1)  {
+		dem_a->elev_units = 2;
+	}
+
+
+	dem_corners->x = dem_size_x;
+	dem_corners->y = dem_size_y;
+	dem_corners->x_gp_min = dem_a->x_gp_first;
+	dem_corners->y_gp_min = dem_a->y_gp_first - ((double)dem_size_y - 1.0) * dem_a->y_res;
+	dem_corners->x_gp_max = dem_a->x_gp_first + ((double)dem_size_x - 1.0) * dem_a->x_res;
+	dem_corners->y_gp_max = dem_a->y_gp_first;
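+	/*
+	 * For example (hypothetical values): with x_gp_first = 422220.0,
+	 * dem_size_x = 400 and x_res = 30.0, x_gp_max becomes
+	 * 422220.0 + 399.0 * 30.0 = 434190.0; the y extent is computed the
+	 * same way, but downward from y_gp_first.
+	 */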
+
+// For debugging.
+//	for (i = 0; i < dem_size_x; i++)  {
+//		for (j = 0; j < dem_size_y; j++)  {
+//			if (*(dem_corners->ptr + j * dem_size_x + i) == HIGHEST_ELEVATION)  {
+//				fprintf(stderr, "FYI:  HIGHEST_ELEVATION at %d %d\n", i, j);
+//			}
+//		}
+//	}
+
+	return 0;
+}

Added: packages/drawmap/branches/upstream/current/dlg.c
===================================================================
--- packages/drawmap/branches/upstream/current/dlg.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/dlg.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,1577 @@
+/*
+ * =========================================================================
+ * dlg.c - Routines to handle DLG data.
+ * Copyright (c) 2000,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+#include <fcntl.h>
+#include <math.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <errno.h>
+#include "drawmap.h"
+#include "dlg.h"
+
+
+
+/*
+ * Storage for attribute types.
+ */
+long num_A_attrib;
+long num_L_attrib;
+struct maj_min attributes_A[MAX_A_ATTRIB];
+struct maj_min attributes_L[MAX_L_ATTRIB];
+
+
+/*
+ * The code that processes DLG files is very spaghetti-like, since
+ * it got squeezed and twisted and stretched while I figured out how
+ * DLG files are put together.
+ *
+ * Because of this, and because I don't like to write functions that
+ * take 35 arguments, there are a lot of global variables used by the
+ * DLG code.  Most of them are accumulated here.
+ */
+
+/*
+ * The sizes of the nodes, areas, and lines arrays are their theoretical maximum values.
+ * It would probably be cooler to malloc() these as we go, but coolness was not an
+ * objective of this program.  It would still be cool to read the maximum values from
+ * the DLG file headers and check them against the values below to verify that
+ * the standards haven't changed and left this program behind.
+ */
+struct nodes nodes[MAX_NODES];
+struct areas areas[MAX_AREAS];
+struct lines lines[MAX_LINES];
+
+double lat_se, long_se, lat_sw, long_sw, lat_ne, long_ne, lat_nw, long_nw;
+static double grid_x_se, grid_y_se, grid_x_sw, grid_y_sw, grid_x_ne, grid_y_ne, grid_x_nw, grid_y_nw;
+long dlg_x_low, dlg_y_low, dlg_x_high, dlg_y_high;
+long x_prime;
+
+long utm_zone;
+
+long right_border = RIGHT_BORDER;
+
+
+/*
+ * Process the data from an optional-format DLG file.
+ * If you haven't read the DLG file guide and looked at a
+ * DLG file, this code will probably be incomprehensible.
+ */
+void
+process_dlg_optional(int fdesc, int gz_flag, struct image_corners *image_corners, long info_flag)
+{
+	long i, j, ret_val;
+	long count;
+	long color;
+	char *end_ptr;
+	char buf[DLG_RECORD_LENGTH + 1];
+	char buf2[DLG_RECORD_LENGTH + 1];
+	struct point **current_point;
+	struct point *tmp_point;
+	struct attribute **current_attrib;
+	struct attribute *tmp_attrib;
+	long attrib;
+	long line_list;
+	long num_nodes = 0;
+	long num_areas = 0;
+	long num_lines = 0;
+	long data_type = 0;
+	double latitude1, longitude1, latitude2, longitude2;
+	ssize_t (* read_function)(int, void *, size_t);
+	long plane_ref;
+	char save_byte;
+	long datum_number;
+	struct datum datum;
+
+	x_prime = image_corners->x + LEFT_BORDER + right_border;
+
+
+	if (gz_flag == 0)  {
+		read_function = buf_read;
+	}
+	else  {
+		read_function = buf_read_z;
+	}
+
+	/*
+	 * Some newer DLG files now come with newlines embedded in them.
+	 * (Older files - at least the ones I checked - did not.  They
+	 * were one long blob of ASCII text, without any newlines at all.)
+	 * In these newer files, the records aren't of fixed length.
+	 * Figure out which type of file we have by examining the
+	 * first record.  Since the old records were DLG_RECORD_LENGTH bytes long,
+	 * we examine up to DLG_RECORD_LENGTH bytes of the first record.  If no newline
+	 * is found, we read fixed-length records.  If a newline is found,
+	 * then we switch our reading routine to be get_a_line or get_a_line_z,
+	 * which read up to a newline and stop.
+	 */
+	for (i = 0; i < DLG_RECORD_LENGTH; i++)  {
+		if ((ret_val = read_function(fdesc, &buf[i], 1)) != 1)  {
+			fprintf(stderr, "1 record DLG read returns %d\n", ret_val);
+			exit(0);
+		}
+		/*
+		 * We assume here that all files with both '\n' and '\r' at the end
+		 * have the '\n' at the end.
+		 */
+		if (buf[i] == '\n')  {
+			if (read_function == buf_read)  {
+				read_function = get_a_line;
+			}
+			else  {
+				read_function = get_a_line_z;
+			}
+			break;
+		}
+	}
+	/* Set ret_val, just in case we need to parse this record some day. */
+	ret_val = i;
+	if (buf[ret_val - 1] == '\r')  {
+		ret_val--;
+	}
+
+	/*
+	 * There is a lot of information in the file header.  We extract
+	 * those items we care about and ignore the rest.
+	 * We aren't interested in the first 10 records (for now), so ignore them.
+	 * We already read the first record, so continue with the second.
+	 */
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "2 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	if (info_flag != 0)  {
+		/*
+		 * If we are trying to print file information, then find the text that
+		 * tells which postal codes (e.g. MT, RI, TX) this DLG file touches.
+		 * In the process of doing this, we will also delimit the DLG name,
+		 * and can print that out as well.
+		 */
+		buf[ret_val] = '\0'; 
+		for (i = 0; i < ret_val; i++)  { 
+			if (buf[i] == ',')  { 
+				fprintf(stdout, "\t%.*s", i, buf);	// Print DLG name
+				i++;
+				for (; i < ret_val; i++)  { 
+					if (buf[i] != ' ')  { 
+						break;
+					}
+				}
+				break;
+			}
+		}
+		for (j = i + 1; j < ret_val; j++)  { 
+			/* Sometimes, postal codes are separated by a space, so check for two spaces. */
+			if ((buf[j] == ' ') && (buf[j + 1] == ' '))  {
+				buf[j] = '\0'; 
+				break;
+			}
+		}
+		fprintf(stdout, "\t%s", &buf[i]);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "3 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		/*
+		 * We are interested in three fields from this record.
+		 * Bytes 7-12 (numbered from 1) contain the ground
+		 * planimetric reference system.  This should be 1 for
+		 * UTM (used with both 24K and 100K files) or 3 for Albers
+		 * Conical Equal Area (used for 2M files).
+		 * Bytes 13-18 give the zone for the given planimetric
+		 * reference system, which for 24K and 100K files is the
+		 * UTM zone, and which is set to 9999 for 2M files.
+		 * Bytes 67-69 give the horizontal datum for the given planimetric
+		 * reference system.  'b' or 0 = NAD 27, 1 = NAD 83, 2 = Puerto Rico,
+		 * 3 = Old Hawaiian, 4 = local (astro).
+		 */
+		fprintf(stderr, "4 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	else  {
+		save_byte = buf[12]; buf[12] = '\0'; plane_ref = strtol(&buf[6], &end_ptr, 10); buf[12] = save_byte;
+		if (plane_ref != 1)  {
+			fprintf(stderr, "DLG file does not use UTM ground planimetric coordinates.\nDrawmap can't handle it.  Exiting.  (Plane_ref = %d)\n", plane_ref);
+			exit(0);
+		}
+
+		save_byte = buf[18]; buf[18] = '\0'; utm_zone = strtol(&buf[12], &end_ptr, 10); buf[18] = save_byte;
+		if ((utm_zone < 1) || (utm_zone > 60))  {
+			fprintf(stderr, "DLG file contains bad UTM zone %d.  Drawmap can't handle it.  Exiting.\n", utm_zone);
+			exit(0);
+		}
+
+		if (ret_val >= 69)  {
+			save_byte = buf[69]; buf[69] = '\0'; datum_number = strtol(&buf[66], &end_ptr, 10); buf[69] = save_byte;
+		}
+		else  {
+			datum_number = 0;
+		}
+		if ((buf[68] == 'b') || (datum_number == 0))  {
+			/*
+			 * The file uses the NAD-27 datum.
+			 * Initialize the datum parameters.
+			 */
+			datum.a = NAD27_SEMIMAJOR;
+			datum.b = NAD27_SEMIMINOR;
+			datum.e_2 = NAD27_E_SQUARED;
+			datum.f_inv = NAD27_F_INV;
+			datum.k0 = UTM_K0;
+			datum.a0 = NAD27_A0;
+			datum.a2 = NAD27_A2;
+			datum.a4 = NAD27_A4;
+			datum.a6 = NAD27_A6;
+		}
+		else if (datum_number == 1)  {
+			/*
+			 * The file uses the NAD-83 datum.
+			 * Initialize the datum parameters.
+			 */
+			datum.a = NAD83_SEMIMAJOR;
+			datum.b = NAD83_SEMIMINOR;
+			datum.e_2 = NAD83_E_SQUARED;
+			datum.f_inv = NAD83_F_INV;
+			datum.k0 = UTM_K0;
+			datum.a0 = NAD83_A0;
+			datum.a2 = NAD83_A2;
+			datum.a4 = NAD83_A4;
+			datum.a6 = NAD83_A6;
+		}
+		else  {
+			/*
+			 * We don't currently handle any other datums.
+			 * Default to the NAD-27 datum.
+			 */
+			datum.a = NAD27_SEMIMAJOR;
+			datum.b = NAD27_SEMIMINOR;
+			datum.e_2 = NAD27_E_SQUARED;
+			datum.f_inv = NAD27_F_INV;
+			datum.k0 = UTM_K0;
+			datum.a0 = NAD27_A0;
+			datum.a2 = NAD27_A2;
+			datum.a4 = NAD27_A4;
+			datum.a6 = NAD27_A6;
+
+			fprintf(stderr, "DLG file uses a horizontal datum that drawmap doesn't know about.\n");
+			fprintf(stderr, "Defaulting to NAD-27.  This may result in positional errors in the map.\n");
+		}
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "5 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "6 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "7 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "8 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "9 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "10 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "11 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	else  {
+		for (i = 0; i < ret_val; i++)  {
+			/* The DLG files use 'D' for exponentiation.  strtod() expects 'E' or 'e'. */
+			if (buf[i] == 'D') buf[i] = 'E';
+		}
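+		/*
+		 * For example (hypothetical field): a coordinate stored as
+		 * "0.4575D+02" becomes "0.4575E+02" after this substitution, so
+		 * strtod() below parses it as 45.75.
+		 */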
+		i = 6;
+		lat_sw = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		long_sw = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		grid_x_sw = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		grid_y_sw = strtod(&buf[i], &end_ptr);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "12 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	else  {
+		for (i = 0; i < ret_val; i++)  {
+			/* The DLG files use 'D' for exponentiation.  strtod() expects 'E' or 'e'. */
+			if (buf[i] == 'D') buf[i] = 'E';
+		}
+		i = 6;
+		lat_nw = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		long_nw = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		grid_x_nw = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		grid_y_nw = strtod(&buf[i], &end_ptr);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "13 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	else  {
+		for (i = 0; i < ret_val; i++)  {
+			/* The DLG files use 'D' for exponentiation.  strtod() expects 'E' or 'e'. */
+			if (buf[i] == 'D') buf[i] = 'E';
+		}
+		i = 6;
+		lat_ne = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		long_ne = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		grid_x_ne = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		grid_y_ne = strtod(&buf[i], &end_ptr);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "14 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	else  {
+		for (i = 0; i < ret_val; i++)  {
+			/* The DLG files use 'D' for exponentiation.  strtod() expects 'E' or 'e'. */
+			if (buf[i] == 'D') buf[i] = 'E';
+		}
+		i = 6;
+		lat_se = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		long_se = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		grid_x_se = strtod(&buf[i], &end_ptr);
+		i = i + end_ptr - &buf[i];
+
+		grid_y_se = strtod(&buf[i], &end_ptr);
+	}
+	if ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) <= 0)  {
+		fprintf(stderr, "15 record DLG read returns %d\n", ret_val);
+		exit(0);
+	}
+	else  {
+		/*
+		 * According to the DLG standard, the first four characters of the short-form
+		 * theme name from the header are verified.  Thus, it should be okay
+		 * to key off the first two characters to find the type of data.
+		 */
+		switch(buf[0])  {
+		case 'B':	/* BOUNDARIES */
+			color = GRAY;
+			data_type = BOUNDARIES;
+			break;
+		case 'H':
+			if (buf[2] == 'D')  {
+				/* HYDROGRAPHY */
+				color = B_BLUE;
+				data_type = HYDROGRAPHY;
+				break;
+			}
+			else  {
+				/* HYPSOGRAPHY */
+				color = L_ORANGE;
+				data_type = HYPSOGRAPHY;
+				break;
+			}
+		case 'P':
+			if (buf[1] == 'I')  {
+				/* PIPE & TRANS LINES */
+				color = BLACK;
+				data_type = PIPE_TRANS_LINES;
+				break;
+			}
+			else  {
+				/* PUBLIC LAND SURVEYS */
+				color = BLACK;
+				data_type = PUBLIC_LAND_SURVEYS;
+				break;
+			}
+		case 'R':
+			if (buf[1] == 'A')  {
+				/* RAILROADS */
+				color = BLACK;
+				data_type = RAILROADS;
+				break;
+			}
+			else  {
+				/* ROADS AND TRAILS */
+				color = B_RED;
+				data_type = ROADS_AND_TRAILS;
+				break;
+			}
+		case 'M':	/* MANMADE FEATURES */
+			color = BLACK;
+			data_type = MANMADE_FEATURES;
+			break;
+		case 'S':	/* SURVEY CONTROL */
+			color = BLACK;
+			data_type = SURVEY_CONTROL;
+			break;
+		case 'V':	/* VEG SURFACE COVER */
+			color = B_GREEN;
+			data_type = VEG_SURFACE_COVER;
+			break;
+		case 'N':	/* NON-VEG FEATURES */
+			color = BLACK;
+			data_type = NON_VEG_FEATURES;
+			break;
+		default:
+			fprintf(stderr, "Unknown record type %20.20s\n", buf);
+			exit(0);
+			break;
+		}
+	}
+
+	/* If info_flag is non-zero, then we just want to print some info about the DLG file and return. */
+	if (info_flag != 0)  {
+		/* Put a null character at the end of the category name (theme). */
+		for (i = 19; i >= 0; i--)  {
+			if (buf[i] != ' ')  {
+				buf[i + 1] = '\0';
+				break;
+			}
+		}
+		if (i == -1)  {
+			buf[0] = '\0';
+		}
+		fprintf(stdout, "\t%.20s\t%g:%g:%g:%g\t%s\n", buf, lat_se, long_se, lat_nw, long_nw,
+			(read_function == get_a_line || read_function == get_a_line_z) ? "linefeeds=yes" : "linefeeds=no");
+		return;
+	}
+
+
+	/*
+	 * Within the Optional-format DLG file, locations are specified with pairs of
+	 * Universal Transverse Mercator (x,y) coordinates.
+	 *
+	 * The header information at the top of the DLG file gives 4 reference
+	 * points for the corners of the polygon represented by the DLG data.  Here is a
+	 * typical set of them:
+	 *
+	 *	SW       45.750000 -112.000000         422218.03  5066539.80                    
+	 *	NW       46.000000 -112.000000         422565.07  5094315.16                    
+	 *	NE       46.000000 -111.750000         441923.83  5094103.38                    
+	 *	SE       45.750000 -111.750000         441663.14  5066327.07                    
+	 *
+	 * Note that the latitude-longitude points form a square area in latitude/longitude
+	 * space (if latitudes and longitudes on a pseudo-sphere can ever be thought of as
+	 * forming a square).  The UTM (x,y) grid coordinates, however, form a quadrilateral
+	 * in which no two sides have the same length.  Thus, if we are to convert the grid
+	 * points in the DLG file into latitudes and longitudes, we need to develop a general
+	 * transformation between these grid points and the desired latitudes and longitudes.
+	 *
+	 *
+	 * Do a quick check here to find out if the data is off the map boundaries.
+	 * If so, then we can return now and save a lot of work.
+	 */
+	if ((lat_sw > image_corners->ne_lat) ||
+	    (long_sw > image_corners->ne_long) ||
+	    (lat_ne < image_corners->sw_lat) ||
+	    (long_ne < image_corners->sw_long))  {
+		return;
+	}
+
+
+	/*
+	 * Following the DLG header information, there is a sequence of data records for
+	 * Nodes, Areas, and Lines.
+	 * Parse these data records and put the data into the appropriate arrays.
+	 * At present, we make absolutely no use of the Node information, but we parse
+	 * and store it anyway.
+	 */
+	while ((ret_val = read_function(fdesc, buf, DLG_RECORD_LENGTH)) > 0)  {
+		switch(buf[0])  {
+		case 'N':
+			i = 1;
+			nodes[num_nodes].id = strtol(&buf[i], &end_ptr, 10);
+			i = i + end_ptr - &buf[i];
+
+			nodes[num_nodes].x = strtod(&buf[i], &end_ptr);
+			i = i + end_ptr - &buf[i];
+
+			nodes[num_nodes].y = strtod(&buf[i], &end_ptr);
+
+			i = 36;
+			line_list = strtol(&buf[i], &end_ptr, 10);
+
+			i = 48;
+			attrib = strtol(&buf[i], &end_ptr, 10);
+
+			if (line_list != 0)  {
+				while(line_list > 0)  {
+					if ((ret_val = read_function(fdesc, buf2, DLG_RECORD_LENGTH)) <= 0)  {
+						fprintf(stderr, "Line_list read 1 returns %d\n", ret_val);
+						fprintf(stderr, "%80.80s\n", buf);
+						exit(0);
+					}
+
+					line_list = line_list - 12;
+				}
+			}
+
+			if (attrib != 0)  {
+				while (attrib > 0)  {
+					if ((ret_val = read_function(fdesc, buf2, DLG_RECORD_LENGTH)) <= 0)  {
+						fprintf(stderr, "Attribute read 1 returns %d\n", ret_val);
+						fprintf(stderr, "%80.80s\n", buf);
+						exit(0);
+					}
+
+					attrib = attrib - 6;
+				}
+			}
+
+			num_nodes++;
+			break;
+
+		case 'A':
+			i = 1;
+			areas[num_areas].id = strtol(&buf[i], &end_ptr, 10);
+			i = i + end_ptr - &buf[i];
+
+			areas[num_areas].x = strtod(&buf[i], &end_ptr);
+			i = i + end_ptr - &buf[i];
+
+			areas[num_areas].y = strtod(&buf[i], &end_ptr);
+
+			i = 36;
+			line_list = strtol(&buf[i], &end_ptr, 10);
+
+			i = 48;
+			attrib = strtol(&buf[i], &end_ptr, 10);
+			areas[num_areas].number_attrib = attrib;
+
+			if (line_list != 0)  {
+				while (line_list > 0)  {
+					if ((ret_val = read_function(fdesc, buf2, DLG_RECORD_LENGTH)) <= 0)  {
+						fprintf(stderr, "Line_list read 2 returns %d\n", ret_val);
+						fprintf(stderr, "%80.80s\n", buf);
+						exit(0);
+					}
+
+					line_list = line_list - 12;
+				}
+			}
+
+			if (attrib != 0)  {
+				while (attrib > 0)  {
+					if ((ret_val = read_function(fdesc, buf2, DLG_RECORD_LENGTH)) <= 0)  {
+						fprintf(stderr, "Attribute read 2 returns %d\n", ret_val);
+						fprintf(stderr, "%80.80s\n", buf);
+						exit(0);
+					}
+
+					current_attrib = &areas[num_areas].attribute;
+
+					if (attrib > 6)  {
+						i = 6;
+						attrib = attrib - 6;
+					}
+					else  {
+						i = attrib;
+						attrib = 0;
+					}
+
+					end_ptr = buf2;
+
+					while (i > 0)  {
+						*current_attrib = (struct attribute *)malloc(sizeof(struct attribute));
+						if (*current_attrib == (struct attribute *)0)  {
+							fprintf(stderr, "malloc failed\n");
+							exit(0);
+						}
+
+						(*current_attrib)->major = strtol(end_ptr, &end_ptr, 10);
+						(*current_attrib)->minor = strtol(end_ptr, &end_ptr, 10);
+
+						current_attrib = &((*current_attrib)->attribute);
+						i--;
+					}
+					*current_attrib = (struct attribute *)0;
+				}
+			}
+
+			num_areas++;
+			break;
+
+		case 'L':
+			i = 1;
+			lines[num_lines].id = strtol(&buf[i], &end_ptr, 10);
+			i = i + end_ptr - &buf[i];
+
+			lines[num_lines].start_node = strtol(&buf[i], &end_ptr, 10);
+			i = i + end_ptr - &buf[i];
+
+			lines[num_lines].end_node = strtol(&buf[i], &end_ptr, 10);
+			i = i + end_ptr - &buf[i];
+
+			lines[num_lines].left_area = strtol(&buf[i], &end_ptr, 10);
+			i = i + end_ptr - &buf[i];
+
+			lines[num_lines].right_area = strtol(&buf[i], &end_ptr, 10);
+
+			i = 42;
+			lines[num_lines].number_coords = strtol(&buf[i], &end_ptr, 10);
+			i = i + end_ptr - &buf[i];
+
+			attrib = strtol(&buf[i], &end_ptr, 10);
+			lines[num_lines].number_attrib = attrib;
+
+			current_point = &lines[num_lines].point;
+			count = lines[num_lines].number_coords;
+			while (count != 0)  {
+				if ((ret_val = read_function(fdesc, buf2, DLG_RECORD_LENGTH)) <= 0)  {
+					fprintf(stderr, "Coordinate read returns %d\n", ret_val);
+					fprintf(stderr, "%80.80s\n", buf);
+					exit(0);
+				}
+				if ((buf2[ret_val - 1] == '\n') || (buf2[ret_val - 1] == '\r'))  {
+					ret_val--;
+				}
+				if ((buf2[ret_val - 1] == '\n') || (buf2[ret_val - 1] == '\r'))  {
+					ret_val--;
+				}
+
+				i = 0;
+				while (i < ret_val)  {
+					while ((i < ret_val) && (buf2[i] == ' '))  {
+						i++;
+					}
+					if (i >= ret_val)  {
+						break;
+					}
+
+					*current_point = (struct point *)malloc(sizeof(struct point));
+					if (*current_point == (struct point *)0)  {
+						fprintf(stderr, "malloc failed\n");
+						exit(0);
+					}
+
+					(*current_point)->x = (long)strtod(&buf2[i], &end_ptr);
+					i = i + end_ptr - &buf2[i];
+					(*current_point)->y = (long)strtod(&buf2[i], &end_ptr);
+					i = i + end_ptr - &buf2[i];
+
+					current_point = &((*current_point)->point);
+					count--;
+				}
+			}
+			*current_point = (struct point *)0;
+
+			if (attrib != 0)  {
+				while (attrib > 0)  {
+					if ((ret_val = read_function(fdesc, buf2, DLG_RECORD_LENGTH)) <= 0)  {
+						fprintf(stderr, "Attribute read 3 returns %d\n", ret_val);
+						fprintf(stderr, "%80.80s\n", buf);
+						exit(0);
+					}
+
+					current_attrib = &lines[num_lines].attribute;
+
+					if (attrib > 6)  {
+						i = 6;
+						attrib = attrib - 6;
+					}
+					else  {
+						i = attrib;
+						attrib = 0;
+					}
+
+					end_ptr = buf2;
+					while (i > 0)  {
+						*current_attrib = (struct attribute *)malloc(sizeof(struct attribute));
+						if (*current_attrib == (struct attribute *)0)  {
+							fprintf(stderr, "malloc failed\n");
+							exit(0);
+						}
+
+						(*current_attrib)->major = strtol(end_ptr, &end_ptr, 10);
+						(*current_attrib)->minor = strtol(end_ptr, &end_ptr, 10);
+
+						current_attrib = &((*current_attrib)->attribute);
+						i--;
+					}
+					*current_attrib = (struct attribute *)0;
+				}
+			}
+
+			num_lines++;
+			break;
+
+		default:
+			fprintf(stderr, "Unknown record type: %c  (hexadecimal: %x)\n", buf[0], buf[0]);
+//			fprintf(stderr, "%80.80s\n", buf);
+//			exit(0);
+			break;
+		}
+	}
+
+
+	/*
+	 * All of the useful data is parsed.
+	 * Now do something with it.
+	 *
+	 * First find the x and y image coordinates that border this DLG chunk.
+	 *
+	 * Then draw the lines for which we have appropriate attribute codes stored,
+	 * but don't go outside the x-y border.
+	 *
+	 * Then fill in all of the areas for which we have
+	 * appropriate attribute codes stored, but don't go outside
+	 * the x-y border.
+	 */
+	dlg_x_low = -1 + round((long_sw - image_corners->sw_long) * (double)image_corners->x / (image_corners->ne_long - image_corners->sw_long));
+	dlg_y_low = image_corners->y - 1 - round((lat_ne - image_corners->sw_lat) * (double)image_corners->y / (image_corners->ne_lat - image_corners->sw_lat));
+	dlg_x_high = -1 + round((long_ne - image_corners->sw_long) * (double)image_corners->x / (image_corners->ne_long - image_corners->sw_long));
+	dlg_y_high = image_corners->y - 1 - round((lat_sw - image_corners->sw_lat) * (double)image_corners->y / (image_corners->ne_lat - image_corners->sw_lat));
+	if (dlg_x_low < -1)  {
+		dlg_x_low = -1;
+	}
+	if (dlg_y_low < -1)  {
+		dlg_y_low = -1;
+	}
+	if (dlg_x_high >= image_corners->x)  {
+		dlg_x_high = image_corners->x - 1;
+	}
+	if (dlg_y_high >= image_corners->y)  {
+		dlg_y_high = image_corners->y - 1;
+	}
+
+	/*
+	 * Cycle through all of the line data and draw all of the appropriate lines
+	 * onto the image (overlaying any previous data).
+	 */
+	for (i = 0; i < num_lines; i++)  {
+		/*
+		 * In the DLG-3 format, the first area element listed
+		 * represents the universe outside of the map area.
+		 * Thus, lines that have area 1 as a boundary should be
+		 * "neatlines" that bound the map area.
+		 * Since these clutter up a map, we normally discard them.
+		 * (If you want to keep them, then change the #define of OMIT_NEATLINES
+		 * so that it is zero, rather than non-zero.)
+		 *
+		 * Here are relevant quotes from the DLG-3 guide:
+		 *
+		 *	expressed by network data is that of connectivity.  The network case
+		 *	differs from the area case in that, irrespective of the number of closed
+		 *	areas forming the graph, only two areas are encoded:  (1) the area out-
+		 *	side the graph, termed the outside area; and (2) the area within the
+		 *	graph, termed the background area.  All lines except the graph boundary,
+		 *	or neatline, are considered to be contained within the background area.
+		 *
+		 *	map border.  There is one outside area for each DLG-3. It is always the
+		 *	first area encountered (its ID is 1) and has the attribute code 000 0000.
+		 */
+
+		/*
+		 * If the user provided a file full of attributes, then
+		 * use them to control whether or not the lines are drawn.
+		 * If not, then just go ahead and draw everything.
+		 *
+		 * Note:  If a major or minor attribute code in the attribute
+		 *        file (supplied by the user) is less than
+		 *        zero, it is treated as a wild card and matches
+		 *        anything.
+		 */
+		if ((num_A_attrib > 0) || (num_L_attrib > 0))  {
+			if ((OMIT_NEATLINES == 0) || ((lines[i].left_area != 1) && (lines[i].right_area != 1)))  {
+				current_attrib = &lines[i].attribute;
+				if (*current_attrib != (struct attribute *)0)  {
+					while (*current_attrib != (struct attribute *)0)  {
+						for (j = 0; j < num_L_attrib; j++)  {
+							if (((attributes_L[j].major < 0) ||
+							     (attributes_L[j].major == ((*current_attrib)->major))) &&
+							    ((attributes_L[j].minor < 0) ||
+							     (attributes_L[j].minor == ((*current_attrib)->minor))))  {
+								draw_lines(&datum, lines[i].point, color, image_corners);
+								goto FIN1;
+							}
+						}
+						current_attrib = &((*current_attrib)->attribute);
+					}
+				}
+				else  {
+					/*
+					 * If the feature had no attribute codes, then check if
+					 * it is covered by a wild card in the attributes file.
+					 */
+					for (j = 0; j < num_L_attrib; j++)  {
+						if (((attributes_L[j].major < 0) ||
+						     (attributes_L[j].major == data_type)) &&
+						    (attributes_L[j].minor < 0))  {
+							draw_lines(&datum, lines[i].point, color, image_corners);
+							goto FIN1;
+						}
+					}
+				}
+			}
+
+			/*
+			 * For those (hopefully rare) occasions in which something
+			 * goes wrong, we provide the capability for a user to
+			 * specifically request a single line from a DLG file so that
+			 * the cause of the problem can be isolated.
+			 * The user specifies a specific line by providing a major
+			 * attribute number of 10000, and a minor attribute number
+			 * equal to the desired line ID number.  Since no
+			 * valid attribute (as far as I know) is ever as large as
+			 * 10,000, such user-specified attribute pairs will not
+			 * affect the search for legitimate attributes above (since
+			 * they can't possibly match anything).  If we reach this point,
+			 * then we failed to draw a line due to the legitimate-attribute
+			 * checks above; so we give it one more try here, based on
+			 * user-requested ID numbers.
+			 *
+			 * Note:  If you are using this feature, then it doesn't make
+			 *        a lot of sense to process more than one DLG file,
+			 *        since the ID number you give (as the minor attribute)
+			 *        will be matched in every DLG file that has a
+			 *        Line with that ID.  If you are trying to isolate
+			 *        one (or a few) Line(s), then you probably want to
+			 *        be certain which file is the source of the data.
+			 */
+			for (j = 0; j < num_L_attrib; j++)  {
+				if ((attributes_L[j].major == 10000) &&
+				     (attributes_L[j].minor == lines[i].id))  {
+					draw_lines(&datum, lines[i].point, color, image_corners);
+					goto FIN1;
+				}
+			}
+		}
+		else  {
+			if ((OMIT_NEATLINES == 0) || ((lines[i].left_area != 1) && (lines[i].right_area != 1)))  {
+				draw_lines(&datum, lines[i].point, color, image_corners);
+			}
+		}
+FIN1:
+		{;}
+	}
+
+	/*
+	 * Now we fill in each interesting area on the map with the
+	 * same color that bounds the area.  (For example,
+	 * lakes (attribute code 050 0421) might be filled in.)
+	 * However, sometimes areas might be filled in improperly.
+	 * The code assumes that the reference point for an area falls
+	 * within the polygon of lines that define that area.
+	 * According to the DLG guide, this isn't guaranteed
+	 * to always be the case, but the assumption has nonetheless
+	 * worked reasonably well in practice.
+	 *
+	 * Area attributes are processed a bit differently than the
+	 * attributes for lines:  no areas are filled in automatically.
+	 * If the user did not specify any Area attributes in the attribute
+	 * file, then no areas are filled in.  This is because the area-fill
+	 * algorithm can occasionally run amok, and therefore the appropriate
+	 * default is to not give it a chance.  For extensive details on the
+	 * area-fill algorithm, see the comments at the top of fill_area().
+	 */
+	if (num_A_attrib > 0)  {
+		for (i = 0; i < num_areas; i++)  {
+			if (areas[i].number_attrib <= 0)  {
+				continue;
+			}
+
+			current_attrib = &areas[i].attribute;
+			while (*current_attrib != (struct attribute *)0)  {
+				for (j = 0; j < num_A_attrib; j++)  {
+					if (((attributes_A[j].major < 0) ||
+					     (attributes_A[j].major == ((*current_attrib)->major))) &&
+					    ((attributes_A[j].minor < 0) ||
+					     (attributes_A[j].minor == ((*current_attrib)->minor))))  {
+						fill_area(&datum, areas[i].x, areas[i].y, color, image_corners);
+						goto FIN2;
+					}
+				}
+				current_attrib = &((*current_attrib)->attribute);
+			}
+
+			/*
+			 * As with the Line attributes, we provide an interface
+			 * for the user to select specific areas, via their IDs.
+			 */
+			for (j = 0; j < num_A_attrib; j++)  {
+				if ((attributes_A[j].major == 10000) &&
+				     (attributes_A[j].minor == areas[i].id))  {
+					fill_area(&datum, areas[i].x, areas[i].y, color, image_corners);
+					goto FIN2;
+				}
+			}
+FIN2:
+			{;}
+		}
+	}
+
+
+	/* Free up all of the malloc() memory */
+	for (i = 0; i < num_lines; i++)  {
+		if (lines[i].number_coords > 0)  {
+			current_point = &lines[i].point;
+
+			while (*current_point != (struct point *)0)  {
+				tmp_point = (*current_point)->point;
+				free(*current_point);
+				*current_point = tmp_point;
+			}
+		}
+		if (lines[i].number_attrib > 0)  {
+			current_attrib = &lines[i].attribute;
+
+			while (*current_attrib != (struct attribute *)0)  {
+				tmp_attrib = (*current_attrib)->attribute;
+				free(*current_attrib);
+				*current_attrib = tmp_attrib;
+			}
+		}
+	}
+	for (i = 0; i < num_areas; i++)  {
+		if (areas[i].number_attrib > 0)  {
+			current_attrib = &areas[i].attribute;
+
+			while (*current_attrib != (struct attribute *)0)  {
+				tmp_attrib = (*current_attrib)->attribute;
+				free(*current_attrib);
+				*current_attrib = tmp_attrib;
+			}
+		}
+	}
+}
+
+
+
+/*
+ * Draw a series of line segments, as defined by a linked list of
+ * points from an optional-format DLG file.
+ *
+ * This routine is recursive, not because it has to be, but because
+ * it was slightly simpler that way.  Since it doesn't recurse very
+ * far (on average), it isn't a performance or memory problem.
+ *
+ * It is a nasty routine to understand, because it has a generalized
+ * interpolation algorithm to capture line segments that go beyond the
+ * image boundaries.
+ */
+void
+draw_lines(struct datum *datum, struct point *cur_point, long color, struct image_corners *image_corners)
+{
+	double latitude1, longitude1;
+	double latitude2, longitude2;
+	long xx1, yy1;
+	long xx2, yy2;
+	double fxx, fyy;
+	double delta_x, delta_y;
+	long steps;
+	long i;
+	double m_lat, m_long, b_lat, b_long;
+	double p_lat1, p_long1, p_lat2, p_long2;
+	double d_lat, d_long;
+	long pointflags = 0;
+	long bothflag = 0;
+
+	/*
+	 * We recurse to the end of the linked list, and then draw line
+	 * segments as we pop back up the recursion stack.
+	 */
+	if (cur_point->point != (struct point *)0)  {
+		draw_lines(datum, cur_point->point, color, image_corners);
+
+		/*
+		 * Draw a segment between this point and the next one down the linked list.
+		 *
+		 * Begin by figuring out the latitude and longitude of the endpoints.
+		 */
+		(void)redfearn_inverse(datum, cur_point->x, cur_point->y, utm_zone, &latitude1, &longitude1);
+		(void)redfearn_inverse(datum, (cur_point->point)->x, (cur_point->point)->y, utm_zone, &latitude2, &longitude2);
+//fprintf(stderr, "x=%g, y=%g, zone=%d, lat=%g, long=%g\n", (cur_point->point)->x, (cur_point->point)->y, utm_zone, latitude1, longitude1);
+
+
+		/*
+		 * Find out whether only one endpoint, or both of them, fall
+		 * outside the map area.
+		 */
+		if ((latitude1 < image_corners->sw_lat) || (latitude1 > image_corners->ne_lat) ||
+		    (longitude1 < image_corners->sw_long) || (longitude1 > image_corners->ne_long))  {
+			bothflag++;
+		}
+		if ((latitude2 < image_corners->sw_lat) || (latitude2 > image_corners->ne_lat) ||
+		    (longitude2 < image_corners->sw_long) || (longitude2 > image_corners->ne_long))  {
+			bothflag++;
+		}
+
+
+		/*
+		 * If at least one endpoint of a line segment is outside of the area
+		 * covered by the map image, then interpolate the segment.
+		 *
+		 * This isn't just to catch errors in a DLG file.  Since the user
+		 * can specify arbitrary latitude/longitude boundaries for the
+		 * map image, either or both endpoints of a segment can easily
+		 * be outside of the map boundaries.
+		 */
+		if (bothflag > 0)  {
+			/*
+			 * Construct two equations for the line passing through the two
+			 * endpoints.  These equations can be solved for four potential
+			 * intercepts with the edge of the map area, only zero or two of
+			 * which should be actual intercepts.  (In theory, there can
+			 * be a single intercept at a corner, but this code should find
+			 * it twice.)
+			 *
+			 * We construct the two lines using the classic Y = m * X + b formula,
+			 * where, in one case, we let Y be the latitude and X be the longitude,
+			 * and in the other case they switch roles.
+			 */
+			m_lat = (latitude2 - latitude1) / (longitude2 - longitude1);
+			b_lat = latitude1 - m_lat * longitude1;
+			m_long = 1.0 / m_lat;
+			b_long = longitude1 - m_long * latitude1;
+
+			/*
+			 * We need the distance (in the Manhattan (city-block) metric) between
+			 * the two endpoints.
+			 * It will be used to determine whether one of the intercepts with
+			 * the map edges falls between the two given endpoints.
+			 */
+			d_lat = fabs(latitude1 - latitude2);
+			d_long = fabs(longitude1 - longitude2);
+
+			/*
+			 * Solve the two equations for the four possible intercepts, and check
+			 * that they are truly intercepts.
+			 * Set a flag to remember which points turned out to be intercepts.
+			 */
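+			/*
+			 * For reference, the flag bits set below are:
+			 *	1 - intercept with the west  edge (longitude = sw_long)
+			 *	2 - intercept with the east  edge (longitude = ne_long)
+			 *	4 - intercept with the south edge (latitude  = sw_lat)
+			 *	8 - intercept with the north edge (latitude  = ne_lat)
+			 */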
+			p_lat1 = m_lat * image_corners->sw_long + b_lat;
+			if ((p_lat1 >= image_corners->sw_lat) && (p_lat1 <= image_corners->ne_lat))  {
+				if ((fabs(image_corners->sw_long - longitude1) <= d_long) && (fabs(image_corners->sw_long - longitude2) <= d_long))  {
+					pointflags |= 1;
+				}
+			}
+			p_lat2 = m_lat * image_corners->ne_long + b_lat;
+			if ((p_lat2 >= image_corners->sw_lat) && (p_lat2 <= image_corners->ne_lat))  {
+				if ((fabs(image_corners->ne_long - longitude1) <= d_long) && (fabs(image_corners->ne_long - longitude2) <= d_long))  {
+					pointflags |= 2;
+				}
+			}
+			p_long1 = m_long * image_corners->sw_lat + b_long;
+			if ((p_long1 >= image_corners->sw_long) && (p_long1 <= image_corners->ne_long))  {
+				if ((fabs(image_corners->sw_lat - latitude1) <= d_lat) && (fabs(image_corners->sw_lat - latitude2) <= d_lat))  {
+					pointflags |= 4;
+				}
+			}
+			p_long2 = m_long * image_corners->ne_lat + b_long;
+			if ((p_long2 >= image_corners->sw_long) && (p_long2 <= image_corners->ne_long))  {
+				if ((fabs(image_corners->ne_lat - latitude1) <= d_lat) && (fabs(image_corners->ne_lat - latitude2) <= d_lat))  {
+					pointflags |= 8;
+				}
+			}
+
+			/*
+			 * If both endpoints fall outside the map area, and there aren't exactly two
+			 * intercepts, then there should be none.  (In theory, when a segment
+			 * just touches a corner of the map area, then there is only one intercept,
+			 * but the above code will find the same intercept twice.)
+			 */
+			if ((bothflag == 2) && (pointflags != 3) && (pointflags != 5) && (pointflags != 6) &&
+			    (pointflags != 9) && (pointflags != 10) && (pointflags != 12))  {
+				if (pointflags != 0)  {
+					fprintf(stderr, "Should have had exactly two intercepts:  0x%lx  (%f %f) (%f %f)\n",
+						pointflags, latitude1, longitude1, latitude2, longitude2);
+				}
+				return;
+			}
+
+			/* If the first endpoint is out of range, then replace it with an intercept. */
+			if ((latitude1 < image_corners->sw_lat) || (latitude1 > image_corners->ne_lat) ||
+			    (longitude1 < image_corners->sw_long) || (longitude1 > image_corners->ne_long))  {
+				if (pointflags & 1)  {
+					latitude1 = p_lat1;
+					longitude1 = image_corners->sw_long;
+					pointflags &= ~1;
+					goto DONE1;
+				}
+				if (pointflags & 2)  {
+					latitude1 = p_lat2;
+					longitude1 = image_corners->ne_long;
+					pointflags &= ~2;
+					goto DONE1;
+				}
+				if (pointflags & 4)  {
+					latitude1 = image_corners->sw_lat;
+					longitude1 = p_long1;
+					pointflags &= ~4;
+					goto DONE1;
+				}
+				if (pointflags & 8)  {
+					latitude1 = image_corners->ne_lat;
+					longitude1 = p_long2;
+					pointflags &= ~8;
+					goto DONE1;
+				}
+			}
+DONE1:
+
+			/* If the second endpoint is out of range, then replace it with an intercept. */
+			if ((latitude2 < image_corners->sw_lat) || (latitude2 > image_corners->ne_lat) ||
+			    (longitude2 < image_corners->sw_long) || (longitude2 > image_corners->ne_long))  {
+				if (pointflags & 1)  {
+					latitude2 = p_lat1;
+					longitude2 = image_corners->sw_long;
+					goto DONE2;
+				}
+				if (pointflags & 2)  {
+					latitude2 = p_lat2;
+					longitude2 = image_corners->ne_long;
+					goto DONE2;
+				}
+				if (pointflags & 4)  {
+					latitude2 = image_corners->sw_lat;
+					longitude2 = p_long1;
+					goto DONE2;
+				}
+				if (pointflags & 8)  {
+					latitude2 = image_corners->ne_lat;
+					longitude2 = p_long2;
+					goto DONE2;
+				}
+			}
+DONE2:
+			{;}
+		}
+
+
+
+		/*
+		 * Convert the latitude/longitude pairs into pixel locations within the image.
+		 *
+		 * Note:  because there may be small errors in longitude1, latitude1, longitude2,
+		 * and latitude2, the values of xx1, yy1, xx2, or yy2 may occasionally be off by
+		 * one pixel.
+		 * This appears to be acceptable in the middle of the image, since one pixel
+		 * doesn't amount to much linear distance in the image.  At the edges, one might
+		 * worry that the discrepancy would cause us to go over the image edges.
+		 * However, the interpolation code above should successfully eliminate this
+		 * potential problem.
+		 *
+		 * As noted above, it is okay for the array index values to go to -1, since that
+		 * is the appropriate value for image_corners->sw_long or image_corners->ne_lat.
+		 */
+		xx1 = -1 + round((longitude1 - image_corners->sw_long) * (double)image_corners->x / (image_corners->ne_long - image_corners->sw_long));
+		yy1 = image_corners->y - 1 - round((latitude1 - image_corners->sw_lat) * (double)image_corners->y / (image_corners->ne_lat - image_corners->sw_lat));
+		xx2 = -1 + round((longitude2 - image_corners->sw_long) * (double)image_corners->x / (image_corners->ne_long - image_corners->sw_long));
+		yy2 = image_corners->y - 1 - round((latitude2 - image_corners->sw_lat) * (double)image_corners->y / (image_corners->ne_lat - image_corners->sw_lat));
+		if ((xx1 < -1) || (yy1 < -1) || (xx1 >= image_corners->x) || (yy1 >= image_corners->y))  {
+			fprintf(stderr, "In draw_lines(), a coordinate exceeds the image boundaries, %ld %ld   %ld %ld\n", xx1, yy1, xx2, yy2);
+			exit(0);
+		}
+
+
+		/*
+		 * Now all that remains is to draw the line segment.
+		 * We begin by deciding whether x or y is the fastest-changing
+		 * coordinate.
+		 */
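+		/*
+		 * For example (numbers chosen only for illustration):  a segment
+		 * from pixel (0,0) to pixel (3,10) steps along y, so the two
+		 * endpoints are plotted and then 9 intermediate pixels are filled
+		 * in at increments of (delta_x, delta_y) = (0.3, 1.0).
+		 */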
+		delta_x = xx2 - xx1;
+		delta_y = yy2 - yy1;
+
+		if (fabs(delta_x) < fabs(delta_y))  {
+			steps = (long)fabs(delta_y) - 1;
+
+			if (delta_y > 0.0)  {
+				delta_x = delta_x / delta_y;
+				delta_y = 1.0;
+			}
+			else if (delta_y < 0.0)  {
+				delta_x = -delta_x / delta_y;
+				delta_y = -1.0;
+			}
+			else  {
+				delta_x = 1.0;
+			}
+		}
+		else  {
+			steps = (long)fabs(delta_x) - 1;
+
+			if (delta_x > 0.0)  {
+				delta_y = delta_y / delta_x;
+				delta_x = 1.0;
+			}
+			else if (delta_x < 0.0)  {
+				delta_y = -delta_y / delta_x;
+				delta_x = -1.0;
+			}
+			else  {
+				delta_y = 1.0;
+			}
+		}
+
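+		/*
+		 * The image buffer is addressed row-major:  x_prime is the full
+		 * width of one row (the map area plus the left and right borders),
+		 * so map-area pixel (xx, yy) lives at
+		 * ptr + (yy + TOP_BORDER) * x_prime + xx + LEFT_BORDER.
+		 */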
+		/* Put dots at the two endpoints. */
+		*(image_corners->ptr + (yy1 + TOP_BORDER) * x_prime + xx1 + LEFT_BORDER) = color;
+		*(image_corners->ptr + (yy2 + TOP_BORDER) * x_prime + xx2 + LEFT_BORDER) = color;
+
+		/* Fill in pixels between the two endpoints. */
+		fxx = xx1;
+		fyy = yy1;
+		for (i = 0; i < steps; i++)  {
+			fxx = fxx + delta_x;
+			fyy = fyy + delta_y;
+			*(image_corners->ptr + (round(fyy) + TOP_BORDER) * x_prime + round(fxx) + LEFT_BORDER) = color;
+		}
+	}
+}
+
+
+
+/*
+ * Fill in an area bounded by a polygon of the given color, beginning at the
+ * given representative point.  (The polygon was previously created by the
+ * line-drawing algorithm.)  The algorithm does this by filling in a given
+ * point and then recursively calling itself to fill in the four nearest neighbors
+ * (to the left, right, top, and bottom).
+ *
+ * Two functions handle this:  fill_area() sets things up, and then
+ * fill_small_area() recursively does the work.  An enterprising reader might
+ * want to convert the recursion into something less likely to consume all
+ * computing resources on the planet.  However, these routines generally
+ * work well unless somehow the representative point falls outside of a bounded
+ * polygon.  (This problem can and does occur, particularly if we aren't using a
+ * one-to-one mapping between DEM elevation samples and image pixels.  Stretching
+ * and scaling can goof things up and, in my experience, more often than not lead
+ * to area fill problems.)
+ * If this happens, then, as the routine attempts to fill large swaths of the image,
+ * the recursion chomps up all available stack memory and the program goes kaboom.
+ * (More commonly, the program doesn't crash, but areas of the image are incorrectly
+ * covered with swaths of blue.)  Fewer resources would be gobbled if, instead of using
+ * recursion, we simply built a stack datatype, and pushed and popped coordinates
+ * onto/from it.  No program is so perfect that it can't be improved.  However,
+ * the recursion itself is not the problem, but rather the errors that
+ * lead to the wrong areas being filled, and thus to excess recursion.
+ *
+ * One other problem with the approach taken here is that, if a lake has a narrow
+ * neck, the line segments at the sides of the neck may touch.  If this is the case,
+ * then only one side of the lake will be filled in (the side containing the
+ * representative point) because the neck forms a solid boundary.
+ *
+ * Yet another problem is that the representative point may be off the map boundaries
+ * if, say, a lake is at the edge of the map and the whole lake doesn't show up on
+ * the map.  In such a case, the lake won't get filled in because the representative
+ * point is rejected by the sanity-checking code.
+ *
+ * Yet another possible problem (although I have not checked into this) may be that there
+ * may not necessarily be a representative point in each DLG file, when a large
+ * lake spans multiple DLG files.  Thus, the DLG files may depend on a single
+ * representative point, in one of the files, to do duty for all of the chunks
+ * of the lake in all of the relevant files.  This would be a problem for drawmap,
+ * because the program is structured on the assumption that each DLG file can be processed
+ * separately from all of the others.  (Again, I have not verified that this is actually
+ * a problem, I am just pointing it out as a possibility based on some anomalies I
+ * have seen on output maps.)
+ *
+ * This algorithm is very crude at this point.  We assume that the given
+ * coordinates actually do fall within the bounded area that they represent,
+ * something that the DLG guide says is normal for these points, but not guaranteed.
+ * It would appear that a general solution not relying on this assumption would be
+ * difficult to produce.  For a convex bounding polygon, one can determine if the
+ * representative point is within the bounding polygon by following the line segments
+ * around the boundaries of the area and checking that the point is always on the same side
+ * of the line segment (relative to the direction of motion).  However, this wouldn't
+ * do us a whole lot of good.  First, the polygons are not, in general, convex.
+ * Second, unless we change the area fill algorithm in some fundamental way,
+ * knowing a single point (one that is guaranteed to be within the boundaries of the area)
+ * still won't guarantee that the area gets filled properly (see the discussion of a
+ * lake with a neck, above).  Third, knowing that a point is within the boundaries of
+ * the area is not adequate to guarantee that it is within the boundaries drawn on
+ * the image.  The lines drawn around the boundaries are "jagged", because we try
+ * to draw slanted lines using pixels that
+ * can only be placed on a square grid.  (This problem is often called "aliasing,"
+ * which is a reference to Nyquist Sampling Theory; but that is a subject far
+ * beyond the scope of this long, rambling comment block.)  It is theoretically possible
+ * for the representative point to land on a pixel that falls outside the drawn
+ * boundaries, because it just happens to fall at a place where a slanted line
+ * segment "jags."  This problem can be exacerbated when the image is stretched
+ * (for example, when a map area that is 2 degrees of longitude by 1 degree of
+ * latitude is plotted on a 2400 by 2400 pixel grid, thus stretching the latitude
+ * direction by a factor of 2).
+ *
+ * We also assume that the area is totally bounded on the right, left, top, and
+ * bottom by points of the given color (or the edges of the DLG data).  The line-drawing
+ * algorithm, above, should ensure this, as long as the line segments given in the
+ * DLG file don't leave gaps (which they normally don't appear to do).
+ *
+ * There may be some cool, sexy way to write an area-fill algorithm that would
+ * be completely general and would run fast.  However, without giving it a massive
+ * amount of thought, the only truly general algorithms I have come up with are very
+ * slow, involving a detailed check of each candidate point to verify that it is indeed
+ * within the given area.  As an example, here is a very clunky algorithm that seems
+ * likely to work without running amok:
+ *
+ * Determine which collection(s) of line segments is associated with the given area.
+ *     (Multiple multi-segment "curves" can bound an area, including the neatlines
+ *      that bound the entire area encompassed by the DLG file.)
+ * Follow the line segments around the bounding polygon and break the polygon into
+ *     multiple polygons, each of which is convex.  This can be done by examining the
+ *     angles between successive line segments.
+ * For each convex sub-polygon:
+ *     Find the largest and smallest longitude and latitude associated with all of the
+ *         segments in the sub-polygon.
+ *     Sweep through all points within the rectangle determined by the longitude/latitude
+ *         bounding box and check each point to determine whether it is within the area
+ *         in question.  This can be done by following the line segments around the polygon
+ *         and checking that the point is always on the same side of each segment.  (The
+ *         sign of the line segment identifier(s) determines which side the point is
+ *         supposed to be on.  See the DLG documentation for details.)
+ *
+ * Although there is a lot of handwaving in the above description, it should be obvious
+ * that this algorithm would be incredibly slow.  One could obviously come up with some
+ * ways to speed it up, since it is designed for simplicity of description rather than
+ * efficiency of operation, but it is not immediately obvious how to make it really fast.
+ * Nor is it immediately obvious (at least to me) how to come up with a different algorithm
+ * that would be both robust and fast.  Also, the current version appears to work much of the
+ * time, with occasional inevitable glitches.  Thus, for the time being, we are stuck with
+ * the code that follows.
+ *
+ * The bottom line is that I have never been satisfied with the area fill algorithm, but
+ * I haven't been able to convince myself to put a massive effort into replacing it.
+ * Instead, I usually turn off area-fill entirely, and then use an image editor to fill
+ * in the areas myself.
+ */
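+
+/*
+ * The comment above mentions replacing the recursion with an explicit stack
+ * of coordinates.  The following is only a rough sketch of that idea:  the
+ * function name and the initial stack size are arbitrary, it is not called
+ * from anywhere in drawmap, and it assumes the same globals (dlg_x_low,
+ * dlg_x_high, dlg_y_low, dlg_y_high, x_prime) and image addressing as
+ * fill_small_area() below.
+ */
+#if 0
+void
+fill_small_area_stack(struct image_corners *image_corners, long x1, long y1, long color)
+{
+	long *stack;		/* each entry is an (x, y) pair of longs */
+	long *tmp;
+	long top;		/* number of pairs currently on the stack */
+	long max = 1024;	/* current capacity, in pairs */
+	long x, y;
+
+	if ((stack = (long *)malloc(2 * max * sizeof(long))) == (long *)0)  {
+		fprintf(stderr, "malloc failed\n");
+		return;
+	}
+	stack[0] = x1;
+	stack[1] = y1;
+	top = 1;
+
+	while (top > 0)  {
+		top--;
+		x = stack[2 * top];
+		y = stack[2 * top + 1];
+
+		/* Same sanity check as the recursive version. */
+		if ((x < dlg_x_low) || (x > dlg_x_high) || (y < dlg_y_low) || (y > dlg_y_high))  {
+			continue;
+		}
+		/* Stop at pixels that already carry the boundary/fill color. */
+		if (*(image_corners->ptr + (y + TOP_BORDER) * x_prime + x + LEFT_BORDER) == color)  {
+			continue;
+		}
+		*(image_corners->ptr + (y + TOP_BORDER) * x_prime + x + LEFT_BORDER) = color;
+
+		/* Push the four nearest neighbors, growing the stack as needed. */
+		if ((top + 4) > max)  {
+			max = max * 2;
+			if ((tmp = (long *)realloc(stack, 2 * max * sizeof(long))) == (long *)0)  {
+				fprintf(stderr, "realloc failed\n");
+				free(stack);
+				return;
+			}
+			stack = tmp;
+		}
+		stack[2 * top] = x;	stack[2 * top + 1] = y - 1;	top++;
+		stack[2 * top] = x;	stack[2 * top + 1] = y + 1;	top++;
+		stack[2 * top] = x - 1;	stack[2 * top + 1] = y;	top++;
+		stack[2 * top] = x + 1;	stack[2 * top + 1] = y;	top++;
+	}
+
+	free(stack);
+}
+#endif
+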
+void
+fill_small_area(struct image_corners *image_corners, long x1, long y1, long color)
+{
+	/*
+	 * Check that we have not wandered outside of the area
+	 * covered by the data from this DLG file.
+	 */
+	if ((x1 < dlg_x_low) || (x1 > dlg_x_high) || (y1 < dlg_y_low) || (y1 > dlg_y_high))  {
+		return;
+	}
+
+	/*
+	 * Fill in the given pixel, and recursively fill in the pixels to the
+	 * left, right, top, and bottom.
+	 */
+	*(image_corners->ptr + (y1 + TOP_BORDER) * x_prime + x1 + LEFT_BORDER) = color;
+
+	if (*(image_corners->ptr + (y1 - 1 + TOP_BORDER) * x_prime + x1 + LEFT_BORDER) != color)  {
+		fill_small_area(image_corners, x1, y1 - 1, color);
+	}
+	if (*(image_corners->ptr + (y1 + 1 + TOP_BORDER) * x_prime + x1 + LEFT_BORDER) != color)  {
+		fill_small_area(image_corners, x1, y1 + 1, color);
+	}
+	if (*(image_corners->ptr + (y1 + TOP_BORDER) * x_prime + x1 - 1 + LEFT_BORDER) != color)  {
+		fill_small_area(image_corners, x1 - 1, y1, color);
+	}
+	if (*(image_corners->ptr + (y1 + TOP_BORDER) * x_prime + x1 + 1 + LEFT_BORDER) != color)  {
+		fill_small_area(image_corners, x1 + 1, y1, color);
+	}
+}
+void
+fill_area(struct datum *datum, double px1, double py1, long color, struct image_corners *image_corners)
+{
+	double latitude1, longitude1;
+	long xx1, yy1;
+
+	/* Find the latitude and longitude of the representative point and convert them into index values. */
+	(void)redfearn_inverse(datum, px1, py1, utm_zone, &latitude1, &longitude1);
+
+	xx1 = -1 + round((longitude1 - image_corners->sw_long) * (double)image_corners->x / (image_corners->ne_long - image_corners->sw_long));
+	yy1 = image_corners->y - 1 - round((latitude1 - image_corners->sw_lat) * (double)image_corners->y / (image_corners->ne_lat - image_corners->sw_lat));
+	if ((xx1 < -1) || (xx1 >= image_corners->x) || (yy1 < -1) || (yy1 >= image_corners->y))  {
+/*		fprintf(stderr, "fill_area() was given a starting point outside the map area:  (%d %d) (%f %f)\n", xx1, yy1, latitude1, longitude1); */
+		return;
+	}
+
+	if ((xx1 < dlg_x_low) || (xx1 > dlg_x_high) || (yy1 < dlg_y_low) || (yy1 > dlg_y_high))  {
+		fprintf(stderr, "fill_area() was passed a bad starting point:  (%ld %ld) (%f %f)\n\tlimits are: %ld %ld   %ld %ld\n",
+			xx1, yy1, latitude1, longitude1, dlg_x_low, dlg_x_high, dlg_y_low, dlg_y_high);
+		return;
+	}
+
+
+	/*
+	 * Some debugging code to figure out where the representative point
+	 * for each area falls on the image.
+	 */
+//	{
+//	static h = 0;
+//	long la, lo;
+//	double long_prime = fabs(longitude1) - 0.5;
+//	la = latitude1;
+//	lo = long_prime;
+//	la = la * 10000 + ((int)((latitude1 - la) * 60.0)) * 100 + (int)((latitude1 - la - ((int)((latitude1 - la) * 60.0)) / 60.0) * 3600.0 + 0.5);
+//	lo = lo * 10000 + ((int)((long_prime - lo) * 60.0)) * 100 + (int)((long_prime - lo - ((int)((long_prime - lo) * 60.0)) / 60.0) * 3600.0 + 0.5);
+//	
+//	fprintf(stderr, "lat=%f long=%f     %d %d\n", la, lo, xx1, yy1);
+//	fprintf(stdout, "Area %2.2d                                                                             island   Blaine                                             30005%6.6dN%7.7dW                     %f %f                         \n", h, la, lo, px1, py1);
+//	h++;
+//	
+//	*(image_corners.ptr + (yy1 + TOP_BORDER) * x_prime + xx1 + LEFT_BORDER) = B_GREEN;
+//	return;
+//	}
+
+
+	/*
+	 * Some small areas are so small that the lines around their borders have
+	 * already filled them in.  If the representative point is already set to
+	 * the target color, then we assume we are in such an area.  In such cases,
+	 * we immediately return, because otherwise (if we happen to be sitting
+	 * right on the boundary) we will begin filling in the area outside the
+	 * boundary and potentially fill large swaths of the image.  The risk of
+	 * simply returning (rather than doing a more thorough investigation of
+	 * what is going on) is that the boundary lines may not have actually
+	 * filled the area in, but rather
+	 * that the representative point just happens to fall very near
+	 * (or on) the boundary.  There is not much we can do about this potential
+	 * problem, unless we re-write the whole area-filling algorithm
+	 * (not necessarily a bad idea).  However, in practice, things seem
+	 * to generally work out semi-okay for many of the data sets I have tried.
+	 * I have detected a number of area-fill problems, but haven't done the research
+	 * to determine the individual causes.  Some or all of the unfilled lakes that
+	 * I have found could conceivably have been caused by this test.
+	 */
+	if (*(image_corners->ptr + (yy1 + TOP_BORDER) * x_prime + xx1 + LEFT_BORDER) == color)  {
+		return;
+	}
+
+	/* Recursively call fill_small_area() to do most of the work. */
+	fill_small_area(image_corners, xx1, yy1, color);
+}
+
+
+
+
+/*
+ * Parse the given attribute file and store the results
+ * in the appropriate storage areas.
+ */
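+/*
+ * As an illustration, an attribute file might contain records like the
+ * following (the codes shown are examples only; a negative major or minor
+ * code acts as a wild card, and major code 10000 selects individual Lines
+ * or Areas by their ID, as described in the comments above):
+ *
+ *	# fill lakes (attribute code 050 0421)
+ *	A 50 421
+ *	# draw every Line whose major code is 50, regardless of minor code
+ *	L 50 -1
+ *	# debugging:  draw only the Line whose ID is 137
+ *	L 10000 137
+ */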
+void
+process_attrib(char *attribute_file)
+{
+	int gz_flag;
+	int attribute_fdesc;
+	long ret_val;
+	char *ptr;
+	char buf[MAX_ATTRIB_RECORD_LENGTH];
+
+
+	num_A_attrib = 0;
+	num_L_attrib = 0;
+	if (attribute_file != (char *)0)  {
+		if (strcmp(attribute_file + strlen(attribute_file) - 3, ".gz") == 0)  {
+			gz_flag = 1;
+			if ((attribute_fdesc = buf_open_z(attribute_file, O_RDONLY)) < 0)  {
+				fprintf(stderr, "Can't open %s for reading, errno = %d\n", attribute_file, errno);
+				exit(0);
+			}
+		}
+		else  {
+			gz_flag = 0;
+			if ((attribute_fdesc = buf_open(attribute_file, O_RDONLY)) < 0)  {
+				fprintf(stderr, "Can't open %s for reading, errno = %d\n", attribute_file, errno);
+				exit(0);
+			}
+		}
+
+		fprintf(stderr, "Processing Attribute file:  %s\n", attribute_file);
+
+		while ( 1 )  {
+			if (gz_flag == 0)  {
+				if ((ret_val = get_a_line(attribute_fdesc, buf, MAX_ATTRIB_RECORD_LENGTH)) <= 0)  {
+					break;
+				}
+			}
+			else  {
+				if ((ret_val = get_a_line_z(attribute_fdesc, buf, MAX_ATTRIB_RECORD_LENGTH)) <= 0)  {
+					break;
+				}
+			}
+
+			buf[ret_val - 1] = '\0';	/* Put a null in place of the newline */
+
+			switch(buf[0])  {
+			case '\0':
+			case '\n':
+			case '\r':
+			case ' ':
+			case '\t':
+				/* Blank line, or line that begins with white space.  Ignore. */
+				break;
+			case '#':
+				/* Comment line.  Ignore. */
+				break;
+			case 'N':
+				/* We don't currently use Node attributes, so do nothing with them. */
+				fprintf(stderr, "Ignoring Node attribute:  %s\n", buf);
+				break;
+			case 'A':
+				/* Area attribute. */
+				if (num_A_attrib >= MAX_A_ATTRIB)  {
+					fprintf(stderr, "Out of space for Area attributes, ignoring:  %s\n", buf);
+					break;
+				}
+				attributes_A[num_A_attrib].major = strtol(&buf[1], &ptr, 10);
+				attributes_A[num_A_attrib].minor = strtol(ptr, &ptr, 10);
+				num_A_attrib++;
+				break;
+			case 'L':
+				/* Line attribute. */
+				if (num_L_attrib >= MAX_L_ATTRIB)  {
+					fprintf(stderr, "Out of space for Line attributes, ignoring:  %s\n", buf);
+					break;
+				}
+				attributes_L[num_L_attrib].major = strtol(&buf[1], &ptr, 10);
+				attributes_L[num_L_attrib].minor = strtol(ptr, &ptr, 10);
+				num_L_attrib++;
+				break;
+			default:
+				fprintf(stderr, "Ignoring unknown attribute type:  %s\n", buf);
+				break;
+			}
+		}
+
+		if (gz_flag == 0)  {
+			buf_close(attribute_fdesc);
+		}
+		else  {
+			buf_close_z(attribute_fdesc);
+		}
+	}
+}

Added: packages/drawmap/branches/upstream/current/dlg.h
===================================================================
--- packages/drawmap/branches/upstream/current/dlg.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/dlg.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,129 @@
+/*
+ * =========================================================================
+ * dlg.h - A header file to define parameters for DLG files.
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+
+#define DLG_RECORD_LENGTH		80
+#define MAX_ATTRIB_RECORD_LENGTH	1000
+
+
+#define	HYPSOGRAPHY		20
+#define	HYDROGRAPHY		50
+#define	VEG_SURFACE_COVER	70
+#define	NON_VEG_FEATURES	80
+#define	BOUNDARIES		90
+#define	SURVEY_CONTROL		150
+#define	ROADS_AND_TRAILS	170
+#define	RAILROADS		180
+#define	PIPE_TRANS_LINES	190
+#define	MANMADE_FEATURES	200
+#define	PUBLIC_LAND_SURVEYS	300
+
+#define MAX_A_ATTRIB		100		// Maximum number of Area attributes
+#define MAX_L_ATTRIB		100		// Maximum number of Line attributes
+#define MAX_POLY_NUM		MAX_AREAS	// Maximum number of stored polygon attribute references
+#define MAX_LINE_LIST		2000		// Maximum size of a line list for output.
+#define MAX_EXTRA		8		// Maximum number of attributes per line, area, or node entry
+#define MAX_ATTRIB_FILES	10		// Maximum number of SDTS attribute files that we can read in
+
+#define MAX_LINES 25938		// Theoretical maximum number of lines in a 100K DLG file.  This number may be out of date.
+#define MAX_AREAS 25960		// Theoretical maximum number of areas in a 100K DLG file.  This number may be out of date.
+#define MAX_NODES 25960		// Theoretical maximum number of nodes in a 100K DLG file.  This number may be out of date.
+
+
+/*
+ * For storing linked lists of points.
+ */
+struct point  {
+	double x;
+	double y;
+	struct point *point;
+};
+
+
+/*
+ * Storage for attribute types.
+ */
+struct maj_min {
+	short major;
+	short minor;
+};
+struct attribute  {
+	short major;
+	short minor;
+	struct attribute *attribute;
+};
+
+/*
+ * The sizes of the nodes, areas, and lines arrays are their theoretical maximum values.
+ * It would probably be cooler to malloc() these as we go, but coolness was not an
+ * objective of this program.  It would still be cool to read the maximum values from
+ * the DLG file headers and check them against the values below to verify that
+ * the standards haven't changed and left this program behind.
+ */
+struct nodes  {
+	short id;
+	double x;
+	double y;
+	short number_attrib;
+	struct attribute *attribute;
+};
+
+struct areas  {
+	short id;
+	double x;
+	double y;
+	short number_attrib;
+	struct attribute *attribute;
+};
+
+struct lines  {
+	short id;
+	short start_node;
+	short end_node;
+	short left_area;
+	short right_area;
+	short number_coords;
+	struct point *point;
+	short number_attrib;
+	struct attribute *attribute;
+};
+
+
+/*
+ * Arrays to keep track of attributes from various SDTS files.
+ */
+struct attribute_list  {
+	short major[MAX_EXTRA];
+	short minor[MAX_EXTRA];
+};
+struct polygon_attrib  {
+	short poly_id;
+	long attrib;
+	char module_num;
+};
+
+
+
+void fill_area(struct datum *, double, double, long, struct image_corners *);
+void process_dlg_optional(int, int, struct image_corners *, long);
+long process_dlg_sdts(char *, char *, long, struct image_corners *, long, long);
+void draw_lines(struct datum *, struct point *, long, struct image_corners *);
+void process_attrib(char *);

Added: packages/drawmap/branches/upstream/current/dlg_sdts.c
===================================================================
--- packages/drawmap/branches/upstream/current/dlg_sdts.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/dlg_sdts.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,5477 @@
+/*
+ * =========================================================================
+ * dlg_sdts.c - Routines to handle DLG data from SDTS files.
+ * Copyright (c) 2000,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <math.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <errno.h>
+#include <time.h>
+#include <string.h>
+#include "drawmap.h"
+#include "dlg.h"
+#include "sdts_utils.h"
+
+
+/*
+ * The routines in this file are uniquely-dedicated to handling
+ * DLG files in the Spatial Data Transfer System (SDTS) format.
+ *
+ * For a general description of SDTS, see sdts_utils.c.
+ */
+
+
+
+/*
+ * The routines in this file are uniquely dedicated to handling
+ * to understand unless you are very familiar with the internals of SDTS files
+ * and optional-format DLG-3 files.  Normally I would provide a lot of descriptive
+ * comments to help you along.  However, in this case, such comments would
+ * probably end up being several times the length of the code.  I wrote this
+ * program with two large documents available for reference.  If you want to
+ * follow the operation of the code, you will probably need those documents
+ * too.  The documents were:
+ *
+ * DLG-3 SDTS Transfer Description, May 7, 1996
+ *
+ * Standards for Digital Line Graphs, September 1999,
+ * Department of the Interior, US Geological Survey,
+ * National Mapping Division.
+ *
+ * There are comments at key points in the code, but they are not adequate
+ * for a full understanding unless you have the reference materials at hand.
+ *
+ * Even the documents aren't really enough.  It is also useful to have
+ * both sample SDTS files and sample optional-format DLG-3 files for reference as well.
+ */
+
+
+
+/*
+ * The code that processes DLG files is very spaghetti-like, since
+ * it got squeezed and twisted and stretched while I figured out how
+ * DLG files are put together.
+ *
+ * Because of this, and because I don't like to write functions that
+ * take 35 arguments, there are a lot of global variables used by the
+ * DLG code.  Most of them are accumulated here.
+ *
+ * Many global variables are already allocated by dlg.c.
+ * We re-use them via extern directives.
+ */
+
+
+/*
+ * Storage for attribute types.
+ */
+extern long num_A_attrib;
+extern long num_L_attrib;
+extern struct maj_min attributes_A[MAX_A_ATTRIB];
+extern struct maj_min attributes_L[MAX_L_ATTRIB];
+
+/*
+ * The sizes of the nodes, areas, and lines arrays are their theoretical maximum values.
+ * It would probably be cooler to malloc() these as we go, but coolness was not an
+ * objective of this program.  It would still be cool to read the maximum values from
+ * the DLG file headers and check them against the values below to verify that
+ * the standards haven't changed and left this program behind.
+ */
+extern struct nodes nodes[MAX_NODES];
+extern struct areas areas[MAX_AREAS];
+extern struct lines lines[MAX_LINES];
+
+
+/*
+ * Arrays to keep track of attributes from various SDTS files.
+ */
+static struct polygon_attrib polygon_attrib[MAX_POLY_NUM];
+static struct attrib_files  {
+	char module_name[4];
+	long num_attrib;
+	struct attribute_list *attrib;
+} attrib_files[MAX_ATTRIB_FILES];
+
+/*
+ * Array for building line lists for output
+ */
+static long line_list[MAX_LINE_LIST];
+
+
+/*
+ * comparison function for use with qsort.
+ */
+static int
+compare_lines(const void *lines1, const void *lines2)
+{
+	if (((struct lines *)lines1)->id < ((struct lines *)lines2)->id)  {
+		return -1;
+	}
+	else if (((struct lines *)lines1)->id > ((struct lines *)lines2)->id)  {
+		return 1;
+	}
+	else  {
+		return 0;
+	}
+}
+
+
+extern double lat_se, long_se, lat_sw, long_sw, lat_ne, long_ne, lat_nw, long_nw;
+extern long dlg_x_low, dlg_y_low, dlg_x_high, dlg_y_high;
+extern long x_prime;
+
+extern long utm_zone;
+
+
+
+long get_extra_attrib(long, long *major, long *minor, long *major2, long *minor2, struct subfield *subfield);
+long process_attrib_sdts(char *, char *, long *, long *, long, long);
+void uniq_attrib(struct attribute **, short *);
+void get_theme(char *, char *, long, long);
+
+
+/*
+ * This routine parses informational data from the various SDTS files
+ * comprising a DLG file set and inserts the converted data into
+ * internal variables.
+ * If you haven't read the DLG file guide and looked at a
+ * DLG file, this code will probably be incomprehensible.
+ *
+ * Here are the meanings of the various module names associated with DLG files:
+ *
+ * There is one module associated with Identification:
+ *   IDEN --- Identification
+ *
+ * Misc:
+ *   STAT --- Transfer Statistics
+ *   CATD --- Catalog/Directory
+ *   CATS --- Catalog/Spatial Domain
+ *   CATX --- Catalog/Cross-Reference
+ *
+ * There are five modules involved in data quality:
+ *   DQHL --- Data Quality/Lineage
+ *   DQPA --- Data Quality/Positional Accuracy
+ *   DQAA --- Data Quality/Attribute Accuracy
+ *   DQCG --- Data Quality/Completeness
+ *   DQLC --- Data Quality/Logical Consistency
+ *
+ * There are three data dictionary modules:
+ *   DDSH --- Data Dictionary/Schema
+ *   MDEF --- Data Dictionary/Definition	(Part of Master Data Dictionary)
+ *   MDOM --- Data Dictionary/Domain		(Part of Master Data Dictionary)
+ *
+ * Modules associated with spatial reference and domain:
+ *   XREF --- External Spatial Reference
+ *   IREF --- Internal Spatial Reference
+ *
+ * Files associated with data:
+ *   AHDR --- Attribute Primary Header
+ *   A??F --- main Attribute Primary
+ *   ACOI --- Coincidence Attribute Primary
+ *   ABDM --- Agencies for Boundaries
+ *   ARDM --- Route Numbers for Roads and Trails
+ *   AHPR --- Elevation in meters for Hypsography
+ *   B??? --- Secondary attribute files (only used for 2,000,000-scale transfers)
+ *   FF01 --- Composite Surfaces
+ *   LE01 --- Line (Chain of Points)
+ *   NA01 --- Point-Node (Area Points --- Representative points within defined areas.)
+ *   NE01 --- Point-Node (Entity Points --- The location of point features like buildings and towers.)
+ *   NO01 --- Point-Node (Planar Node --- The junction of two or more lines.)
+ *   NP01 --- Point-Node (Registration Points --- This generally defines the four corner points of the data in UTM.
+ *                        The same data is available in AHDR in the form of latitude/longitude.)
+ *   PC01 --- Polygon
+ */
+long
+process_dlg_sdts(char *passed_file_name, char *output_file_name, long gz_flag,
+		struct image_corners *image_corners, long info_flag, long file_image_flag)
+{
+	long i, j, k, l, ret_val;
+	long start_node, current_node;
+	long number_of_islands;
+	long module_num;
+	long layer;
+	long count;
+	long color;
+	long d, m;
+	double s;
+	double x, y;
+	char code1, code2;
+	char output_file[12];
+	int output_fdesc;
+	char *ptr;
+	char buf[DLG_RECORD_LENGTH + 1];
+	char buf3[DLG_RECORD_LENGTH + 1];
+	struct point **current_point;
+	struct point **current_point2;
+	struct point *tmp_point;
+	struct attribute **current_attrib;
+	struct attribute **current_attrib2;
+	struct attribute *tmp_attrib;
+	long attrib;
+	long current_poly;
+	long num_polys;
+	long num_areas = -1;
+	long num_lines = -1;
+	long num_nodes = -1;
+	long num_NO_nodes;
+	long data_type = 0;
+	double latitude1, longitude1, latitude2, longitude2;
+	ssize_t (* read_function)(int, void *, size_t);
+	long plane_ref;
+	char save_byte;
+	int fdesc;
+	long file_name_length;
+	char file_name[MAX_FILE_NAME + 1];
+	long byte_order;
+	long upper_case_flag;
+	long need;
+	double x_scale_factor, y_scale_factor;
+	double x_origin, y_origin;
+	double x_resolution, y_resolution;
+	char postal_code[30];
+	char attribute_file[9];
+	long num_attribs;
+	long num_attrib_files;
+	struct subfield subfield;
+	char source_date[4];
+	char sectional_indicator[3];
+	char category_name[21];
+	long vertical_datum;
+	long horizontal_datum;
+	long dlg_level;
+	long line_list_size;
+	double se_x, se_y, sw_x, sw_y, nw_x, nw_y, ne_x, ne_y;
+	long major;
+	long minor;
+	struct datum datum;
+	long record_id;
+
+
+	if (file_image_flag == 0)  {
+		x_prime = image_corners->x + LEFT_BORDER + right_border;
+	}
+
+
+	/* find the native byte-order on this machine. */
+	byte_order = swab_type();
+
+
+	/*
+	 * Make a copy of the file name.  The one we were originally
+	 * given is still stored in the command line arguments.
+	 * It is probably a good idea not to alter those, lest we
+	 * scribble something we don't want to scribble.
+	 */
+	file_name_length = strlen(passed_file_name);
+	if (file_name_length > MAX_FILE_NAME)  {
+		fprintf(stderr, "File name is too long.\n");
+		return 1;
+	}
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	file_name[MAX_FILE_NAME] = '\0';
+	if (file_name_length < 12)  {
+		/*
+		 * Excluding the initial path, the file name should have the form
+		 * ????LE??.DDF, perhaps with a ".gz" on the end.  If it isn't
+		 * at least long enough to have this form, then reject it.
+		 */
+		fprintf(stderr, "File name doesn't look right.\n");
+		return 1;
+	}
+	/* Check the case of the characters in the file name by examining a single character. */
+	if (gz_flag == 0)  {
+		if (file_name[file_name_length - 1] == 'f')  {
+			upper_case_flag = 0;
+		}
+		else  {
+			upper_case_flag = 1;
+		}
+	}
+	else  {
+		if (file_name[file_name_length - 4] == 'f')  {
+			upper_case_flag = 0;
+		}
+		else  {
+			upper_case_flag = 1;
+		}
+	}
+
+
+
+	/*
+	 * The first file name we need is the Attribute Primary Header (AHDR) module, which contains
+	 * the latitude/longitude registration points of the four corners of the data.
+	 *
+	 * There is a lot of stuff in AHDR that goes into record 3.
+	 * Thus we provide an extra record buffer so that we can build
+	 * record 3 while processing AHDR.  The buffer is buf3[], and
+	 * before we start we fill it with blanks.
+	 *
+	 * Note:  We fill buffer 3 even when file_image_flag == 0,
+	 * since it doesn't take much processing, and that way we
+	 * don't have to clutter up the code with "if" statements.
+	 */
+	for (i = 0; i < DLG_RECORD_LENGTH; i++)  {
+		buf3[i] = ' ';
+	}
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "ahdr.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "ahdr.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "AHDR.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "AHDR.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * We also need the layer number from the file name.
+	 * Some SDTS transfers have multiple LE files, and we need
+	 * to pull the correct record out of the AHDR file.
+	 *
+	 * In theory, we should probably pull this out of the FF01
+	 * module, but this would be a lot of work to get the same
+	 * information.  As long as USGS files are named with the
+	 * layer number in the file name, this approach is more efficient.
+	 */
+	if (gz_flag != 0)  {
+		layer = strtol(&passed_file_name[file_name_length - 9], (char **)0, 10);
+	}
+	else  {
+		layer = strtol(&passed_file_name[file_name_length - 6], (char **)0, 10);
+	}
+	if (layer <= 0)  {
+		fprintf(stderr, "Got bad layer number (%ld) from file %s.\n", layer, passed_file_name);
+		return 1;
+	}
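+	/*
+	 * For example, a (hypothetical) name such as "1234LE02.DDF" yields
+	 * layer 2; "1234LE02.DDF.gz" is handled identically via the larger
+	 * offset above.
+	 */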
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 25;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "ATPR") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strncmp(subfield.label, "RCID", 4) == 0))  {
+				/*
+				 * Check for the correct layer.
+				 * set layer = -1 as a flag if you find it.
+				 */
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				if (layer == strtol(subfield.value, (char **)0, 10))  {
+					layer = -1;
+				}
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if ((layer < 0) && (strcmp(subfield.tag, "ATTP") == 0))  {
+			save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+			if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "SW_LATITUDE", 11) == 0))  {
+				lat_sw = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "SW_LONGITUDE", 12) == 0))  {
+				long_sw = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "NW_LATITUDE", 11) == 0))  {
+				lat_nw = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "NW_LONGITUDE", 12) == 0))  {
+				long_nw = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "NE_LATITUDE", 11) == 0))  {
+				lat_ne = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "NE_LONGITUDE", 12) == 0))  {
+				long_ne = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "SE_LATITUDE", 11) == 0))  {
+				lat_se = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "SE_LONGITUDE", 12) == 0))  {
+				long_se = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "BANNER", 6) == 0))  {
+				strncpy(buf, subfield.value, subfield.length);
+				for (i = subfield.length; i < DLG_RECORD_LENGTH; i++)  {
+					buf[i] = ' ';
+				}
+				buf[DLG_RECORD_LENGTH] = '\0';
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "SOURCE_DATE", 11) == 0))  {
+				/*
+				 * Copy the original source date for later use.
+				 */
+				if (subfield.length == 4)  {
+					strncpy(source_date, subfield.value, 4);
+				}
+				else  {
+					strncpy(source_date, "    ", 4);
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "QUAD_NUMBER", 11) == 0))  {
+				/*
+				 * Copy the sectional indicator for later use.
+				 */
+				if (subfield.length == 3)  {
+					strncpy(sectional_indicator, subfield.value, 3);
+				}
+				else  {
+					strncpy(sectional_indicator, "   ", 3);
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "L_PRIM_INTERVAL", 15) == 0))  {
+				/*
+				 * Put the largest primary interval into buf3.
+				 * Append the comma separator.
+				 *
+				 * Note:  SDTS stores this in a 5-byte Real subfield,
+				 * but there are only 4 bytes reserved in the DLG-3 header.
+				 * The discrepancy arises from the fact that the value is
+				 * stored in the header as a three-digit integer (the integer
+				 * part of the 5-byte Real), followed by a single digit
+				 * describing the units (which for 24K and 100K DLG files
+				 * is always 2, for meters).
+				 */
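+				/*
+				 * For example (value chosen only for illustration):  a
+				 * subfield value of "  40." is written into the header
+				 * as " 402," (the integer 40, then the units digit 2,
+				 * then the comma separator).
+				 */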
+				if ((subfield.length == 5) && (strncmp(subfield.value, "     ", 5) != 0))  {
+					i = (long)strtod(subfield.value, (char **)0);
+					sprintf(&buf3[41], "%3d2,", i);
+				}
+				else  {
+					strncpy(&buf3[41], "     ", 5);
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "L_PB_INTERVAL", 13) == 0))  {
+				/*
+				 * Put the largest primary bathymetric interval into buf3.
+				 * Append the single-space filler.
+				 *
+				 * Note:  SDTS stores this in a 5-byte Real subfield,
+				 * but there are only 4 bytes reserved in the DLG-3 header.
+				 * The discrepancy arises from the fact that the value is
+				 * stored in the header as a three-digit integer (the integer
+				 * part of the 5-byte Real), followed by a single digit
+				 * describing the units (which for 24K and 100K DLG files
+				 * is always 2, for meters).
+				 */
+				if ((subfield.length == 5) && (strncmp(subfield.value, "     ", 5) != 0))  {
+					i = (long)strtod(subfield.value, (char **)0);
+					sprintf(&buf3[46], "%3d2 ", i);
+				}
+				else  {
+					strncpy(&buf3[46], "     ", 5);
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "S_PRIM_INTERVAL", 15) == 0))  {
+				/*
+				 * Put the smallest primary interval into buf3.
+				 * Append the comma separator.
+				 *
+				 * Note:  SDTS stores this in a 5-byte Real subfield,
+				 * but there are only 4 bytes reserved in the DLG-3 header.
+				 * The discrepancy arises from the fact that the value is
+				 * stored in the header as a three-digit integer (the integer
+				 * part of the 5-byte Real), followed by a single digit
+				 * describing the units (which for 24K and 100K DLG files
+				 * is always 2, for meters).
+				 */
+				if ((subfield.length == 5) && (strncmp(subfield.value, "     ", 5) != 0))  {
+					i = (long)strtod(subfield.value, (char **)0);
+					sprintf(&buf3[51], "%3d2,", i);
+				}
+				else  {
+					strncpy(&buf3[51], "     ", 5);
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "S_PB_INTERVAL", 13) == 0))  {
+				/*
+				 * Put the smallest primary bathymetric interval into buf3.
+				 *
+				 * Note:  SDTS stores this in a 5-byte Real subfield
+				 * but there are only 4 bytes reserved in the DLG-3 header.
+				 * The discrepancy arises from the fact that the value is
+				 * stored in the header as a three-digit integer (the integer
+				 * part of the 5-byte Real), followed by a single digit
+				 * describing the units (which for 24K and 100K DLG files
+				 * is always 2, for meters).
+				 */
+				if ((subfield.length == 5) && (strncmp(subfield.value, "     ", 5) != 0))  {
+					i = (long)strtod(subfield.value, (char **)0);
+					sprintf(&buf3[56], "%3d2", i);
+				}
+				else  {
+					strncpy(&buf3[56], "    ", 4);
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "CODED_FLAG", 10) == 0))  {
+				/*
+				 * Copy the coded flag into buf3.
+				 * It is preceded by three bytes reserved for future use as coded flags.
+				 * These 3 reserved bytes normally appear to be stored as null characters in USGS DLG-3
+				 * files.  We will do the same, although it is ugly.
+				 */
+				if (subfield.length == 1)  {
+					buf3[63] = subfield.value[0];
+				}
+				else  {
+					buf3[63] = ' ';
+				}
+				buf3[60] = '\0';
+				buf3[61] = '\0';
+				buf3[62] = '\0';
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "EDGEWS", 6) == 0))  {
+				/*
+				 * Copy the west-edge status flag into buf3.
+				 */
+				if (subfield.length == 1)  {
+					buf3[64] = subfield.value[0];
+				}
+				else  {
+					buf3[64] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "EDGEWR", 6) == 0))  {
+				/*
+				 * Copy the west-edge status flag reason into buf3.
+				 */
+				if (subfield.length == 1)  {
+					buf3[65] = subfield.value[0];
+				}
+				else  {
+					buf3[65] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "EDGENS", 6) == 0))  {
+				/*
+				 * Copy the north-edge status flag into buf3.
+				 */
+				if (subfield.length == 1)  {
+					buf3[66] = subfield.value[0];
+				}
+				else  {
+					buf3[66] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "EDGENR", 6) == 0))  {
+				/*
+				 * Copy the north-edge status flag reason into buf3.
+				 */
+				if (subfield.length == 1)  {
+					buf3[67] = subfield.value[0];
+				}
+				else  {
+					buf3[67] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "EDGEES", 6) == 0))  {
+				/*
+				 * Copy the east-edge status flag into buf3.
+				 */
+				if (subfield.length == 1)  {
+					buf3[68] = subfield.value[0];
+				}
+				else  {
+					buf3[68] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "EDGEER", 6) == 0))  {
+				/*
+				 * Copy the east-edge status flag reason into buf3.
+				 */
+				if (subfield.length == 1)  {
+					buf3[69] = subfield.value[0];
+				}
+				else  {
+					buf3[69] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "EDGESS", 6) == 0))  {
+				/*
+				 * Copy the south-edge status flag into buf3.
+				 */
+				if (subfield.length == 1)  {
+					buf3[70] = subfield.value[0];
+				}
+				else  {
+					buf3[70] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "EDGESR", 6) == 0))  {
+				/*
+				 * Copy the south-edge status flag reason into buf3.
+				 */
+				if (subfield.length == 1)  {
+					buf3[71] = subfield.value[0];
+				}
+				else  {
+					buf3[71] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "VERTICAL_DATUM", 14) == 0))  {
+				/*
+				 * Convert and save the vertical datum for later use.
+				 */
+				if (strncmp(subfield.value, "NGVD", (subfield.length > 4 ? 4 : subfield.length)) == 0)  {
+					vertical_datum = 0;
+				}
+				else if (strncmp(subfield.value, "NAVD", (subfield.length > 4 ? 4 : subfield.length)) == 0)  {
+					vertical_datum = 1;
+				}
+				else if (strncmp(subfield.value, "LOCAL MEAN SEA LEVEL", subfield.length) == 0)  {
+					vertical_datum = 2;
+				}
+				else  {
+					vertical_datum = -1;
+				}
+				need--;
+			}
+			subfield.value[subfield.length] = save_byte;
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+	/*
+	 * We now have enough information to open the output file.
+	 *
+	 * file_image_flag controls whether this routine writes to a file (file_image_flag != 0)
+	 * or writes to the drawmap image buffer (file_image_flag == 0).  This is ugly, but
+	 * it lets us maintain only a single version of this SDTS parsing code.
+	 */
+	if (file_image_flag != 0)  {
+		if (output_file_name != (char *)0)  {
+			if ((output_fdesc = open(output_file_name, O_WRONLY | O_CREAT | O_EXCL, 0644)) < 0)  {
+				fprintf(stderr, "Can't create %s for writing, errno = %d\n", output_file_name, errno);
+				exit(0);
+			}
+		}
+		else  {
+			code1 = 'a' + floor((fabs(lat_se) + (lat_se < 0 ? -1.0 : 1.0) * 0.02 - floor(fabs(lat_se) + (lat_se < 0 ? -1.0 : 1.0) * 0.02)) * 8.0);
+			code2 = '1' + floor((fabs(long_se) + (long_se < 0 ? 1.0 : -1.0) * 0.02 - floor(fabs(long_se) + (long_se < 0 ? 1.0 : -1.0) * 0.02)) * 8.0);
+			sprintf(output_file, "%02.2d%03.3d%c%c.dlg",
+				(int)(fabs(lat_se) + (lat_se < 0 ? -1.0 : 1.0) * 0.02),
+				(int)(fabs(long_se) + (long_se < 0 ? 1.0 : -1.0) * 0.02),
+				code1, code2);
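+			/*
+			 * Hedged arithmetic check (illustration only): for the sample SE
+			 * corner lat_se = 45.75, long_se = -111.75 quoted in the comments
+			 * further below, the expressions above give code1 = 'g' and
+			 * code2 = '7', so the generated name would be "45111g7.dlg".
+			 */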
+			if ((output_fdesc = open(output_file, O_WRONLY | O_CREAT | O_EXCL, 0644)) < 0)  {
+				fprintf(stderr, "Can't create %s for writing, errno = %d\n", output_file, errno);
+				exit(0);
+			}
+		}
+		/*
+		 * The first record of the file is simply the banner.
+		 * We stored the banner in buf[].  Write it out.
+		 */
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+	}
+
+
+
+	/*
+	 * The next file we need is the IDEN module, which contains the official DLG
+	 * name.  The name is normally followed (in SDTS) by " / " and a category name (theme),
+	 * such as HYDROGRAPHY.  We need to strip off this extra category bit, since it isn't
+	 * normally part of an optional-format record 1.  However, we also need to save
+	 * it because it shows up at the beginning of record 15.  We also want to
+	 * extract the postal code if the user has given the info_flag.
+	 *
+	 * Here is a typical SDTS TITL subfield:
+	 *
+	 * ALZADA, MT-SD-WY / BOUNDARIES
+	 */
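+	/*
+	 * Illustration (not part of the original comments): for the TITL value
+	 * above, the parsing below copies "ALZADA, MT-SD-WY" into buf (space-padded
+	 * out to 41 bytes), stores "BOUNDARIES" in category_name, and, when
+	 * info_flag is set, saves the state list "MT-SD-WY" as the postal code.
+	 */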
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "iden.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "iden.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "IDEN.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "IDEN.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 3;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "IDEN") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "TITL", 4) == 0))  {
+				for (i = 0; i < subfield.length; i++)  { 
+					if (subfield.value[i] == '/')  { 
+						j = i;
+						break;
+					}
+				}
+				if (i < subfield.length)  {
+					while ((i - 1) >= 0)  {
+						i--;
+						if (subfield.value[i] != ' ')  {
+							i++;
+							break;
+						}
+					}
+				}
+				if (info_flag != 0)  {
+					/*
+					 * Grab the postal code.
+					 */
+					for (k = 0; k < i; k++)  {
+						if (subfield.value[k] == ',')  {
+							break;
+						}
+					}
+					if (k < i)  {
+						k++;
+						for ( ; k < i; k++)  {
+							if (subfield.value[k] != ' ')  {
+								break;
+							}
+						}
+						strncpy(postal_code, &subfield.value[k], (i - k + 1) > 29 ? 29 : (i - k + 1));
+						postal_code[(i - k + 1) > 29 ? 29 : (i - k + 1)] = '\0';
+					}
+					else  {
+						postal_code[0] = 0;
+					}
+				}
+				/*
+				 * Copy the official DLG name into the beginning of the
+				 * record buffer.
+				 * It is followed by a 1-byte space filler.
+				 */
+				if (i > 40)  {
+					/* Truncate the name if it is too long.  It shouldn't be. */
+					i = 40;
+				}
+				strncpy(buf, subfield.value, i);
+				for ( ; i < 41; i++)  {
+					buf[i] = ' ';
+				}
+				j++;
+				while (j < subfield.length)  {
+					if (subfield.value[j] != ' ')  {
+						break;
+					}
+					j++;
+				}
+				if (j != subfield.length)  {
+					if ((subfield.length - j) <= 20)  {
+						strncpy(category_name, &subfield.value[j], subfield.length - j);
+						category_name[subfield.length - j] = '\0';
+					}
+					else  {
+						strncpy(category_name, &subfield.value[j], 20);
+						category_name[20] = '\0';
+					}
+				}
+				else  {
+					category_name[0] = '\0';
+				}
+				for (i = strlen(category_name) - 1; i >= 0; i--)  {
+					if (category_name[i] != ' ')  {
+						category_name[i + 1] = '\0';
+						break;
+					}
+				}
+				if (i == -1)  {
+					category_name[0] = '\0';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "I") != (char *)0) && (strncmp(subfield.label, "SCAL", 4) == 0))  {
+				/*
+				 * Copy the source material scale into the
+				 * record buffer.
+				 * It is followed by space filler out to byte 63.
+				 */
+				if (subfield.length >= 8)  {
+					strncpy(&buf[52], subfield.value, 8);
+				}
+				else  {
+					strncpy(&buf[52], "        ", 8);
+					strncpy(&buf[52 + 8 - subfield.length], subfield.value, subfield.length);
+				}
+				for (i = 60; i < 63; i++)  {
+					buf[i] = ' ';
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "DAST", 4) == 0))  {
+				/*
+				 * Save the DLG level for later use.
+				 */
+				if (subfield.length == 5)  {
+					dlg_level = subfield.value[4] - '0';
+				}
+				else  {
+					dlg_level = -1;
+				}
+				if ((dlg_level != 3) && (dlg_level != 2))  {
+					fprintf(stderr, "Warning:  This does not appear to be a level 2 or 3 DLG.\n");
+				}
+				need--;
+			}
+		}
+		if (need == 0)  {
+			/* This is all we need.  Break out of the loop. */
+			break;
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+	if (file_image_flag != 0)  {
+		/*
+		 * Copy the source date into the buffer.
+		 * It is followed by blank filler out to byte 52.
+		 */
+		strncpy(&buf[41], source_date, 4);
+		buf[45] = ',';
+		for (i = 46; i < 52; i++)  {
+			buf[i] = ' ';
+		}
+		/*
+		 * Copy the sectional indicator into the buffer.
+		 * It is followed by blank filler out to the end of the record.
+		 */
+		strncpy(&buf[63], sectional_indicator, 3);
+		for (i = 66; i < DLG_RECORD_LENGTH; i++)  {
+			buf[i] = ' ';
+		}
+		/*
+		 * The second record of the file is set up.
+		 * Write it out.
+		 *
+		 * Also, we completely built record 3 while processing the ADHR
+		 * module.  Write it out as well.
+		 */
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+		if (write(output_fdesc, buf3, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+	}
+
+
+	
+	/*
+	 * As a "just-in-case" measure, we grabbed the category name (theme) from
+	 * the DLG name above.  This would be fine, for everything except TRANSPORTATION.
+	 * In TRANSPORTATION files, the name we grabbed above will be "TRANSPORTATION".
+	 * However, what we really need is one of the three themes:  "ROADS AND TRAILS",
+	 * "RAILROADS", or "PIPE & TRANS LINES".  In order to get this information,
+	 * we have at least two possible routes of attack.  We can try to deduce it from
+	 * the types of attributes referenced in the Node/Area/Lines records.  Or, we
+	 * can read the CATS module and try to get the THEM subfield (which stands for theme)
+	 * for our passed_file_name.  We take the latter course here, but we will also
+	 * try the other approach inside the process_attrib_sdts() function.
+	 */
+	get_theme(passed_file_name, category_name, upper_case_flag, gz_flag);
+
+
+
+	if (info_flag != 0)  {
+		/* If info_flag is nonzero, then all we need to do is print some info and return. */
+		for (i = 0; i < 40; i++)  {
+			/* We want only that part of the official name up to the comma. */
+			if (buf[i] == ',')  {
+				break;
+			}
+		}
+		fprintf(stdout, "\t%.*s\t%s\t%.20s\t%g:%g:%g:%g\n", i, buf, postal_code, category_name, lat_se, long_se, lat_nw, long_nw);
+		return 0;
+	}
+
+
+
+	/*
+	 * Within the DLG data, locations are specified with pairs of
+	 * Universal Transverse Mercator (x,y) coordinates.
+	 *
+	 * The header information for the DLG data gives 4 reference
+	 * points for the corners of the polygon represented by the DLG data.  Here is a
+	 * typical set of them:
+	 *
+	 *	SW       45.750000 -112.000000         422218.03  5066539.80                    
+	 *	NW       46.000000 -112.000000         422565.07  5094315.16                    
+	 *	NE       46.000000 -111.750000         441923.83  5094103.38                    
+	 *	SE       45.750000 -111.750000         441663.14  5066327.07                    
+	 *
+	 * Note that the latitude-longitude points form a square area in latitude/longitude
+	 * space (if latitudes and longitudes on a pseudo-sphere can ever be thought of as
+	 * forming a square).  The UTM (x,y) grid coordinates, however, form a quadrilateral
+	 * in which no two sides have the same length.
+	 *
+	 * If file_image_flag == 0, then we are supposed to be writing to the image buffer.
+	 * There is no point in this if the data won't affect the image.
+	 * Do a quick check here to find out if the data is off the map boundaries.
+	 * If so, then we can return now and save a lot of work.
+	 */
+	if (file_image_flag == 0)  {
+		if ((lat_sw > image_corners->ne_lat) ||
+		    (long_sw > image_corners->ne_long) ||
+		    (lat_ne < image_corners->sw_lat) ||
+		    (long_ne < image_corners->sw_long))  {
+			return 0;
+		}
+	}
+
+
+	num_attrib_files = process_attrib_sdts(passed_file_name, category_name, &data_type, &color, gz_flag, upper_case_flag);
+
+
+
+	/*
+	 * The next file name we need is the XREF module, which contains
+	 * the Planar Reference System, Horizontal Datum, and UTM Zone.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "xref.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "xref.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "XREF.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "XREF.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 3;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "XREF") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "HDAT") == 0))  {
+				/*
+				 * Valid choices are "NAS" for NAD-27, "NAX" for NAD-83,
+				 * "Puerto Rico", "Old Hawaiian", and "Local (Astro)".
+				 */
+				if (strncmp(subfield.value, "NAS", subfield.length) == 0)  {
+					horizontal_datum = 0;
+				}
+				else if (strncmp(subfield.value, "NAX", subfield.length) == 0)  {
+					horizontal_datum = 1;
+				}
+				else if (strncmp(subfield.value, "Puerto Rico", subfield.length) == 0)  {
+					horizontal_datum = 2;
+				}
+				else if (strncmp(subfield.value, "Old Hawaiian", subfield.length) == 0)  {
+					horizontal_datum = 3;
+				}
+				else if (strncmp(subfield.value, "Local (Astro)", subfield.length) == 0)  {
+					horizontal_datum = 4;
+				}
+				else  {
+					horizontal_datum = -1;
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "RSNM") == 0))  {
+				/*
+				 * Valid choices are "UTM" and "GEO"
+				 */
+				if (strncmp(subfield.value, "GEO", subfield.length) == 0)  {
+					plane_ref = 0;
+				}
+				else if (strncmp(subfield.value, "UTM", subfield.length) == 0)  {
+					plane_ref = 1;
+				}
+				else  {
+					plane_ref = -1;
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "ZONE") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				utm_zone = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+				need--;
+			}
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+	/* Check the just-acquired results. */
+	if (plane_ref != 1)  {
+		fprintf(stderr, "DLG file does not use UTM ground planimetric coordinates.  (Plane_ref = %d)\n", plane_ref);
+		exit(0);
+	}
+	if ((utm_zone < 1) || (utm_zone > 60))  {
+		fprintf(stderr, "DLG file contains bad UTM zone %d.\n", utm_zone);
+		exit(0);
+	}
+	if (horizontal_datum == 0)  {
+		/*
+		 * The file uses the NAD-27 datum.
+		 * Initialize the datum parameters.
+		 */
+		datum.a = NAD27_SEMIMAJOR;
+		datum.b = NAD27_SEMIMINOR;
+		datum.e_2 = NAD27_E_SQUARED;
+		datum.f_inv = NAD27_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = NAD27_A0;
+		datum.a2 = NAD27_A2;
+		datum.a4 = NAD27_A4;
+		datum.a6 = NAD27_A6;
+	}
+	else if (horizontal_datum == 1)  {
+		/*
+		 * The file uses the NAD-83 datum.
+		 * Initialize the datum parameters.
+		 */
+		datum.a = NAD83_SEMIMAJOR;
+		datum.b = NAD83_SEMIMINOR;
+		datum.e_2 = NAD83_E_SQUARED;
+		datum.f_inv = NAD83_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = NAD83_A0;
+		datum.a2 = NAD83_A2;
+		datum.a4 = NAD83_A4;
+		datum.a6 = NAD83_A6;
+	}
+	else  {
+		/*
+		 * We don't currently handle any other datums.
+		 * Default to the NAD-27 datum.
+		 */
+		datum.a = NAD27_SEMIMAJOR;
+		datum.b = NAD27_SEMIMINOR;
+		datum.e_2 = NAD27_E_SQUARED;
+		datum.f_inv = NAD27_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = NAD27_A0;
+		datum.a2 = NAD27_A2;
+		datum.a4 = NAD27_A4;
+		datum.a6 = NAD27_A6;
+
+		fprintf(stderr, "DLG file uses a horizontal datum that I don't handle (%d).\n", horizontal_datum);
+		fprintf(stderr, "Defaulting to NAD-27.  This may result in positional errors in the data.\n");
+	}
+
+
+
+	/*
+	 * The next file name we need is the IREF module, which contains
+	 * the x and y scale factors that are multiplied by the x and y UTM coordinate values,
+	 * the x and y origins, which are added to the UTM coordinate values, and the
+	 * x and y horizontal resolutions.
+	 */
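+	/*
+	 * Hedged example with made-up numbers: if SFAX were 0.01 and XORG were
+	 * 400000.0, a stored coordinate integer of 10000 would map to the ground
+	 * coordinate 10000 * 0.01 + 400000.0 = 400100.0 meters.
+	 */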
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "iref.ddf", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "iref.ddf", 8);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&file_name[file_name_length - 11], "IREF.DDF", 8);
+		}
+		else  {
+			strncpy(&file_name[file_name_length - 8], "IREF.DDF", 8);
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	need = 6;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "IREF") == 0)  {
+			save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+			if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "SFAX") == 0))  {
+				x_scale_factor = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "SFAY") == 0))  {
+				y_scale_factor = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "XORG") == 0))  {
+				x_origin = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "YORG") == 0))  {
+				y_origin = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "XHRS") == 0))  {
+				x_resolution = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			else if ((strstr(subfield.format, "R") != (char *)0) && (strcmp(subfield.label, "YHRS") == 0))  {
+				y_resolution = strtod(subfield.value, (char **)0);
+				need--;
+			}
+			subfield.value[subfield.length] = save_byte;
+			if (need == 0)  {
+				/* This is all we need.  Break out of the loop. */
+				break;
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * Parse the NP?? file, which contains the UTM x/y
+	 * coordinates of the four corner registration points.
+	 */
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'n';
+			file_name[file_name_length - 10] = 'p';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'n';
+			file_name[file_name_length -  7] = 'p';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'N';
+			file_name[file_name_length - 10] = 'P';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'N';
+			file_name[file_name_length -  7] = 'P';
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	record_id = -1;
+	need = 8;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "PNTS") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				record_id = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if (strcmp(subfield.tag, "SADR") == 0)  {
+			if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "X") == 0))  {
+				if (subfield.length != 4)  {
+					/* Error */
+					x = -1.0;
+				}
+				else  {
+					i = (((long)subfield.value[3] & 0xff) << 24) |
+					    (((long)subfield.value[2] & 0xff) << 16) |
+					    (((long)subfield.value[1] & 0xff) <<  8) |
+					     ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						x = (double)i * x_scale_factor + x_origin;
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&i);
+						x = (double)i * x_scale_factor + x_origin;
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&i);
+						x = (double)i * x_scale_factor + x_origin;
+					}
+				}
+				switch (record_id)  {
+				case 1:
+					sw_x = x;
+					break;
+				case 2:
+					nw_x = x;
+					break;
+				case 3:
+					ne_x = x;
+					break;
+				case 4:
+					se_x = x;
+					break;
+				default:
+					fprintf(stderr, "Problem parsing NP?? module record %d in file %s.\n", record_id, file_name);
+					break;
+				}
+				need--;
+			}
+			else if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "Y") == 0))  {
+				if (subfield.length != 4)  {
+					/* Error */
+					y = -1.0;
+				}
+				else  {
+					i = (((long)subfield.value[3] & 0xff) << 24) |
+					    (((long)subfield.value[2] & 0xff) << 16) |
+					    (((long)subfield.value[1] & 0xff) <<  8) |
+					     ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						y = (double)i * y_scale_factor + y_origin;
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&i);
+						y = (double)i * y_scale_factor + y_origin;
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&i);
+						y = (double)i * y_scale_factor + y_origin;
+					}
+				}
+				switch (record_id)  {
+				case 1:
+					sw_y = y;
+					break;
+				case 2:
+					nw_y = y;
+					break;
+				case 3:
+					ne_y = y;
+					break;
+				case 4:
+					se_y = y;
+					break;
+				default:
+					fprintf(stderr, "Problem parsing NP?? module record %d in file %s.\n", record_id, file_name);
+					break;
+				}
+				need--;
+			}
+		}
+		if (need == 0)  {
+			/* This is all we need.  Break out of the loop. */
+			break;
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+	/* Check that we found what we wanted. */
+	if (need > 0)  {
+		fprintf(stderr, "Failed to get needed data from file %s.\n", file_name);
+		return 1;
+	}
+	else if (need < 0)  {
+		fprintf(stderr, "Warning:  Got more data from file %s than expected.\n", file_name);
+	}
+
+
+
+	/*
+	 * We are now in a position to write a bunch more records to the output file.
+	 */
+	if (file_image_flag != 0)  {
+		/*
+		 * Prepare and write record 4.
+		 * The 2 code says that the file uses meters to measure horizontal distance.
+		 * (There is no other choice.)
+		 * The first 4 code is the number of file-to-map transformation parameters.
+		 * (There is no other choice.)
+		 * The 0 code is the number of accuracy/miscellaneous records.
+		 * (There is no other choice.)
+		 * The second 4 code is the number of control points.
+		 * (There is no other choice.)
+		 * The 1 code is the number of categories in the DLG file.
+		 * (I assume this means hydrography versus transportation versus whatever)
+		 * (There is no other choice.)
+		 */
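+		/*
+		 * Hedged example (illustrative values only): with dlg_level = 3,
+		 * plane_ref = 1, and utm_zone = 12, the first 24 bytes produced by the
+		 * sprintf() below would read "     3     1    12     2", followed by
+		 * the x resolution with 'D' substituted for 'E' in the exponent.
+		 */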
+		sprintf(buf, "%6d%6d%6d%6d% 18.11E%6d%6d%6d%6d%3d%3d        ",
+				dlg_level, plane_ref, utm_zone, 2, x_resolution, 4, 0, 4, 1, horizontal_datum, vertical_datum);
+		for (i = 24; i < 42; i++)  {
+			if (buf[i] == 'E')  buf[i] = 'D';	// USGS files use 'D' for exponentiation.
+		}
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+
+
+		/*
+		 * Records 5 through 9 contain 15 map transformation parameters,
+		 * three per record.
+		 * For 24K and 100K DLGs, which use UTM coordinates, only the first
+		 * two parameters are non-zero.  The first one is the longitude of
+		 * the center of the cell.  The second one is the latitude of the
+		 * center of the cell.
+		 *
+		 * The values are in the form:  degrees * 1000000 + minutes * 1000 + seconds
+		 */
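+		/*
+		 * Worked example (not from the original comments): a center latitude
+		 * of 45.875 degrees is 45 degrees 52 minutes 30 seconds, which encodes
+		 * as 45 * 1000000 + 52 * 1000 + 30 = 45052030.
+		 */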
+		latitude1 = (lat_se + lat_ne) / 2.0;
+		decimal_degrees_to_dms(latitude1, &d, &m, &s);
+		latitude2 = (d < 0 ? -1.0 : 1.0) * (fabs((double)d) * 1000000.0 + (double)m * 1000.0 + s);
+		longitude1 = (long_se + long_sw) / 2.0;
+		decimal_degrees_to_dms(longitude1, &d, &m, &s);
+		longitude2 = (d < 0 ? -1.0 : 1.0) * (fabs((double)d) * 1000000.0 + (double)m * 1000.0 + s);
+
+		sprintf(buf, "% 24.15E% 24.15E% 24.15E        ", longitude2, latitude2, 0.0);
+		for (i = 0; i < 72; i++)  {
+			if (buf[i] == 'E')  buf[i] = 'D';	// USGS files use 'D' for exponentiation.
+		}
+
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+		if (write(output_fdesc, "   0.000000000000000D+00   0.000000000000000D+00   0.000000000000000D+00        ", DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+		if (write(output_fdesc, "   0.000000000000000D+00   0.000000000000000D+00   0.000000000000000D+00        ", DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+		if (write(output_fdesc, "   0.000000000000000D+00   0.000000000000000D+00   0.000000000000000D+00        ", DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+		if (write(output_fdesc, "   0.000000000000000D+00   0.000000000000000D+00   0.000000000000000D+00        ", DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+
+
+
+		/*
+		 * Record 10 contains 4 internal file-to-map projection transformation parameters.
+		 * These apparently are used to convert internal file coordinates into a ground
+		 * planimetric system (presumably UTM).  Since the internal coordinates are already
+		 * in UTM, the record takes the form:  1.0 0.0 0.0 0.0.
+		 */
+		if (write(output_fdesc, " 0.10000000000D+01 0.00000000000D+00 0.00000000000D+00 0.00000000000D+00        ", DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+
+
+
+		/*
+		 * Records 11 through 14 define the four corners of the data.  Each record takes
+		 * a form like:
+		 *
+		 * SW       45.750000 -111.250000         480554.09  5066083.19
+		 *
+		 * where there is a two-letter code defining the corner, the latitude/longitude
+		 * in decimal degrees, and the UTM x and y coordinates.
+		 */
+		sprintf(buf, "%2.2s    % 12.6f%12.6f      % 12.2f% 12.2f                    ", "SW", lat_sw, long_sw, sw_x, sw_y);
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+		sprintf(buf, "%2.2s    % 12.6f%12.6f      % 12.2f% 12.2f                    ", "NW", lat_nw, long_nw, nw_x, nw_y);
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+		sprintf(buf, "%2.2s    % 12.6f%12.6f      % 12.2f% 12.2f                    ", "NE", lat_ne, long_ne, ne_x, ne_y);
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+		sprintf(buf, "%2.2s    % 12.6f%12.6f      % 12.2f% 12.2f                    ", "SE", lat_se, long_se, se_x, se_y);
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+
+
+		/*
+		 * Record 15 has to wait until we read in all of the node/line/area data,
+		 * since it contains numbers that tell how many nodes/lines/areas there are.
+		 */
+	}
+
+
+
+	/*
+	 * Parse the LE?? file, which contains segmented lines; this was the file name
+	 * originally passed into this routine.
+	 */
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(passed_file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", passed_file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	attrib = -1;	// Use this convenient variable as an unrelated flag for the first trip through the loop.
+	count = 0;
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "LINE") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				/* We are starting a new line.  Initialize what needs initializing. */
+				if (attrib >= 0)  {
+					/*
+					 * If we aren't starting the first line,
+					 * then terminate the attribute string and node list of the
+					 * previous line and update the counts.
+					 */
+					*current_attrib = (struct attribute *)0;
+					*current_point = (struct point *)0;
+					lines[num_lines].number_attrib = attrib;
+					lines[num_lines].number_coords = count;
+					uniq_attrib(&lines[num_lines].attribute, &lines[num_lines].number_attrib);
+				}
+				module_num = -1;
+				num_lines++;
+				count = 0;
+				attrib = 0;
+				if (num_lines >= MAX_LINES)  {
+					fprintf(stderr, "Ran out of space to store lines.  Some lines may be missing.\n");
+					break;
+				}
+				current_attrib = &lines[num_lines].attribute;
+				current_point = &lines[num_lines].point;
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				lines[num_lines].id = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if (strcmp(subfield.tag, "ATID") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+				if (subfield.length != 4)  {
+					fprintf(stderr, "Attribute module name (%.*s) is not 4 characters long.\n", subfield.length, subfield.value);
+					module_num = -1;
+					continue;
+				}
+				for (module_num = 0; module_num < num_attrib_files; module_num++)  {
+					if (strncmp(subfield.value, attrib_files[module_num].module_name, 4) == 0)  {
+						break;
+					}
+				}
+				if (module_num == num_attrib_files)  {
+					fprintf(stderr, "Warning:  Attribute module has unexpected name (%.*s).  Attributes may be in error.\n", subfield.length, subfield.value);
+					module_num = -1;
+				}
+			}
+			else if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				if (module_num < 0)  {
+					continue;
+				}
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				i =  strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+				if (i <= attrib_files[module_num].num_attrib)  {
+					for (j = 0; j < MAX_EXTRA; j++)  {
+						if (attrib_files[module_num].attrib[i - 1].major[j] != 0)  {
+							*current_attrib = (struct attribute *)malloc(sizeof(struct attribute));
+							if (*current_attrib == (struct attribute *)0)  {
+								fprintf(stderr, "malloc failed\n");
+								exit(0);
+							}
+							(*current_attrib)->major = attrib_files[module_num].attrib[i - 1].major[j];
+							(*current_attrib)->minor = attrib_files[module_num].attrib[i - 1].minor[j];
+
+							current_attrib = &((*current_attrib)->attribute);
+							attrib++;
+						}
+					}
+				}
+			}
+		}
+		else if (strcmp(subfield.tag, "PIDL") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				lines[num_lines].left_area = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if (strcmp(subfield.tag, "PIDR") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				lines[num_lines].right_area = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if (strcmp(subfield.tag, "SNID") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				lines[num_lines].start_node = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if (strcmp(subfield.tag, "ENID") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				lines[num_lines].end_node = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if (strcmp(subfield.tag, "SADR") == 0)  {
+			/*
+			 * We assume that the X coordinate always comes first.
+			 */
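+			/*
+			 * Hedged note on the decoding below: SADR stores each coordinate as
+			 * a 4-byte binary integer.  The assembly treats value[0] as the
+			 * least-significant byte, so (for example) bytes 0x10 0x27 0x00 0x00
+			 * give i = 10000 before any byte_order swab; i is then scaled by
+			 * SFAX and offset by XORG from the IREF module.
+			 */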
+			if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "X") == 0))  {
+				*current_point = (struct point *)malloc(sizeof(struct point));
+				if (*current_point == (struct point *)0)  {
+					fprintf(stderr, "malloc failed\n");
+					exit(0);
+				}
+
+				if (subfield.length != 4)  {
+					/* Error */
+					(*current_point)->x = -1.0;
+				}
+				else  {
+					i = (((long)subfield.value[3] & 0xff) << 24) |
+					    (((long)subfield.value[2] & 0xff) << 16) |
+					    (((long)subfield.value[1] & 0xff) <<  8) |
+					     ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						(*current_point)->x = (double)i * x_scale_factor + x_origin;
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&i);
+						(*current_point)->x = (double)i * x_scale_factor + x_origin;
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&i);
+						(*current_point)->x = (double)i * x_scale_factor + x_origin;
+					}
+				}
+			}
+			else if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "Y") == 0))  {
+				if (subfield.length != 4)  {
+					/* Error */
+					(*current_point)->y = -1.0;
+				}
+				else  {
+					i = (((long)subfield.value[3] & 0xff) << 24) |
+					    (((long)subfield.value[2] & 0xff) << 16) |
+					    (((long)subfield.value[1] & 0xff) <<  8) |
+					     ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						(*current_point)->y = (double)i * y_scale_factor + y_origin;
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&i);
+						(*current_point)->y = (double)i * y_scale_factor + y_origin;
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&i);
+						(*current_point)->y = (double)i * y_scale_factor + y_origin;
+					}
+				}
+
+				current_point = &((*current_point)->point);
+				count++;
+			}
+		}
+	}
+	if (num_lines >= 0)  {
+		/*
+		 * If we had at least one line
+		 * then close out the attribute and node information of the
+		 * previous line and update the counts.
+		 */
+		*current_attrib = (struct attribute *)0;
+		*current_point = (struct point *)0;
+		lines[num_lines].number_attrib = attrib;
+		lines[num_lines].number_coords = count;
+		uniq_attrib(&lines[num_lines].attribute, &lines[num_lines].number_attrib);
+	}
+	num_lines++;
+	/* We are done with this file, so close it. */
+	end_ddf();
+
+
+
+	/*
+	 * Parse the NO?? file, which contains normal planar nodes.
+	 * These nodes lie at either end of a piecewise-linear feature.
+	 */
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'n';
+			file_name[file_name_length - 10] = 'o';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'n';
+			file_name[file_name_length -  7] = 'o';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'N';
+			file_name[file_name_length - 10] = 'O';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'N';
+			file_name[file_name_length -  7] = 'O';
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	attrib = -1;	// Use this convenient variable as an unrelated flag for the first trip through the loop.
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "PNTS") == 0)  {
+			if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				/* We are starting a new node.  Initialize what needs initializing. */
+				if (attrib >= 0)  {
+					/*
+					 * If we aren't starting the first node,
+					 * then terminate the attribute string of the
+					 * previous node and update the count.
+					 */
+					*current_attrib = (struct attribute *)0;
+					nodes[num_nodes].number_attrib = attrib;
+					uniq_attrib(&nodes[num_nodes].attribute, &nodes[num_nodes].number_attrib);
+				}
+				module_num = -1;
+				num_nodes++;
+				attrib = 0;
+				if (num_nodes >= MAX_NODES)  {
+					fprintf(stderr, "Ran out of space to store nodes.  Some nodes may be missing.\n");
+					break;
+				}
+				current_attrib = &nodes[num_nodes].attribute;
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				nodes[num_nodes].id = strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+			}
+		}
+		else if (strcmp(subfield.tag, "ATID") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+				if (subfield.length != 4)  {
+					fprintf(stderr, "Attribute module name (%.*s) is not 4 characters long.\n", subfield.length, subfield.value);
+					module_num = -1;
+					continue;
+				}
+				for (module_num = 0; module_num < num_attrib_files; module_num++)  {
+					if (strncmp(subfield.value, attrib_files[module_num].module_name, 4) == 0)  {
+						break;
+					}
+				}
+				if (module_num == num_attrib_files)  {
+					fprintf(stderr, "Warning:  Attribute module has unexpected name (%.*s).  Attributes may be in error.\n", subfield.length, subfield.value);
+					module_num = -1;
+				}
+			}
+			else if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+				if (module_num < 0)  {
+					continue;
+				}
+				save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+				i =  strtol(subfield.value, (char **)0, 10);
+				subfield.value[subfield.length] = save_byte;
+				if (i <= attrib_files[module_num].num_attrib)  {
+					for (j = 0; j < MAX_EXTRA; j++)  {
+						if (attrib_files[module_num].attrib[i - 1].major[j] != 0)  {
+							*current_attrib = (struct attribute *)malloc(sizeof(struct attribute));
+							if (*current_attrib == (struct attribute *)0)  {
+								fprintf(stderr, "malloc failed\n");
+								exit(0);
+							}
+							(*current_attrib)->major = attrib_files[module_num].attrib[i - 1].major[j];
+							(*current_attrib)->minor = attrib_files[module_num].attrib[i - 1].minor[j];
+
+							current_attrib = &((*current_attrib)->attribute);
+							attrib++;
+						}
+					}
+				}
+			}
+		}
+		else if (strcmp(subfield.tag, "SADR") == 0)  {
+			/*
+			 * We assume that the X coordinate always comes first.
+			 */
+			if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "X") == 0))  {
+				if (subfield.length != 4)  {
+					/* Error */
+					nodes[num_nodes].x = -1.0;
+				}
+				else  {
+					i = (((long)subfield.value[3] & 0xff) << 24) |
+					    (((long)subfield.value[2] & 0xff) << 16) |
+					    (((long)subfield.value[1] & 0xff) <<  8) |
+					     ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						nodes[num_nodes].x = (double)i * x_scale_factor + x_origin;
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&i);
+						nodes[num_nodes].x = (double)i * x_scale_factor + x_origin;
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&i);
+						nodes[num_nodes].x = (double)i * x_scale_factor + x_origin;
+					}
+				}
+			}
+			else if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "Y") == 0))  {
+				if (subfield.length != 4)  {
+					/* Error */
+					nodes[num_nodes].y = -1.0;
+				}
+				else  {
+					i = (((long)subfield.value[3] & 0xff) << 24) |
+					    (((long)subfield.value[2] & 0xff) << 16) |
+					    (((long)subfield.value[1] & 0xff) <<  8) |
+					     ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						nodes[num_nodes].y = (double)i * y_scale_factor + y_origin;
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&i);
+						nodes[num_nodes].y = (double)i * y_scale_factor + y_origin;
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&i);
+						nodes[num_nodes].y = (double)i * y_scale_factor + y_origin;
+					}
+				}
+			}
+		}
+	}
+	if (num_nodes >= 0)  {
+		/*
+		 * If we had at least one node
+		 * then close out the attribute information of the
+		 * previous node and update the count.
+		 */
+		*current_attrib = (struct attribute *)0;
+		nodes[num_nodes].number_attrib = attrib;
+		uniq_attrib(&nodes[num_nodes].attribute, &nodes[num_nodes].number_attrib);
+	}
+	num_nodes++;
+	num_NO_nodes = num_nodes;
+	/* We are done with this file, so close it. */
+	end_ddf();
+
+
+
+	/*
+	 * Parse the NE?? file, which contains degenerate lines.
+	 * In the optional-format DLG files, degenerate lines appear both in the
+	 * node section and the line section.  In SDTS, they appear only in the
+	 * NE?? module.  We simultaneously add them to the line list and the node list.
+	 *
+	 * When we add the node to the line array, we must artificially build
+	 * a node list that is two nodes long, although the two nodes are
+	 * identical.
+	 *
+	 * We have already read nodes from the NO?? module into the nodes array.
+	 * Add the new nodes at the end of the array.
+	 *
+	 * The only other node files are NP??, which contains registration
+	 * points for the four corners of the data, and NA??, which contains
+	 * area representative points.  Neither of these files affects the nodes
+	 * array.  Thus, after this block of code, the nodes array should be complete.
+	 */
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'n';
+			file_name[file_name_length - 10] = 'e';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'n';
+			file_name[file_name_length -  7] = 'e';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'N';
+			file_name[file_name_length - 10] = 'E';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'N';
+			file_name[file_name_length -  7] = 'E';
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) >= 0)  {
+		/*
+		 * Loop through the subfields until we find what we want.
+		 */
+		attrib = -1;	// Use this convenient variable as an unrelated flag for the first trip through the loop.
+		count = 0;
+		num_lines--;
+		num_nodes--;
+		while (get_subfield(&subfield) != 0)  {
+			if (strcmp(subfield.tag, "PNTS") == 0)  {
+				if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+					/* We are starting a new node.  Initialize what needs initializing. */
+					if (attrib >= 0)  {
+						/*
+						 * If we aren't starting the first node,
+						 * then terminate the attribute string of the
+						 * previous line and update the counts.
+						 */
+						*current_attrib = (struct attribute *)0;
+						*current_point = (struct point *)0;
+						lines[num_lines].number_attrib = attrib;
+						lines[num_lines].number_coords = count;
+						uniq_attrib(&lines[num_lines].attribute, &lines[num_lines].number_attrib);
+	
+//						*current_attrib2 = (struct attribute *)0;
+//						nodes[num_nodes].number_attrib = attrib;
+//						uniq_attrib(&nodes[num_nodes].attribute, &nodes[num_nodes].number_attrib);
+					}
+					module_num = -1;
+					num_nodes++;
+					num_lines++;
+					count = 0;
+					attrib = 0;
+					if (num_nodes >= MAX_NODES)  {
+						fprintf(stderr, "Ran out of space to store nodes.  Some nodes may be missing.\n");
+						break;
+					}
+					if (num_lines >= MAX_LINES)  {
+						fprintf(stderr, "Ran out of space to store lines.  Some lines may be missing.\n");
+						break;
+					}
+//					current_attrib2 = &nodes[num_nodes].attribute;
+					save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+					nodes[num_nodes].id = strtol(subfield.value, (char **)0, 10);
+					subfield.value[subfield.length] = save_byte;
+	
+					current_attrib = &lines[num_lines].attribute;
+					current_point = &lines[num_lines].point;
+					lines[num_lines].id = nodes[num_nodes].id;
+					lines[num_lines].start_node = nodes[num_nodes].id;
+					lines[num_lines].end_node = nodes[num_nodes].id;
+				}
+			}
+			else if (strcmp(subfield.tag, "ATID") == 0)  {
+				if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+					if (subfield.length != 4)  {
+						fprintf(stderr, "Attribute module name (%.*s) is not 4 characters long.\n", subfield.length, subfield.value);
+						module_num = -1;
+						continue;
+					}
+					for (module_num = 0; module_num < num_attrib_files; module_num++)  {
+						if (strncmp(subfield.value, attrib_files[module_num].module_name, 4) == 0)  {
+							break;
+						}
+					}
+					if (module_num == num_attrib_files)  {
+						fprintf(stderr, "Warning:  Attribute module has unexpected name (%.*s).  Attributes may be in error.\n", subfield.length, subfield.value);
+						module_num = -1;
+					}
+				}
+				else if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+					if (module_num < 0)  {
+						continue;
+					}
+					save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+					i =  strtol(subfield.value, (char **)0, 10);
+					subfield.value[subfield.length] = save_byte;
+					if (i <= attrib_files[module_num].num_attrib)  {
+						for (j = 0; j < MAX_EXTRA; j++)  {
+							if (attrib_files[module_num].attrib[i - 1].major[j] != 0)  {
+								*current_attrib = (struct attribute *)malloc(sizeof(struct attribute));
+								if (*current_attrib == (struct attribute *)0)  {
+									fprintf(stderr, "malloc failed\n");
+									exit(0);
+								}
+//								*current_attrib2 = (struct attribute *)malloc(sizeof(struct attribute));
+//								if (*current_attrib2 == (struct attribute *)0)  {
+//									fprintf(stderr, "malloc failed\n");
+//									exit(0);
+//								}
+
+								(*current_attrib)->major = attrib_files[module_num].attrib[i - 1].major[j];	// line attribute entry
+								(*current_attrib)->minor = attrib_files[module_num].attrib[i - 1].minor[j];
+
+//								(*current_attrib2)->major = attrib_files[module_num].attrib[i - 1].major[j];	// node attribute entry
+//								(*current_attrib2)->minor = attrib_files[module_num].attrib[i - 1].minor[j];
+
+								current_attrib = &((*current_attrib)->attribute);
+//								current_attrib2 = &((*current_attrib2)->attribute);
+								attrib++;
+							}
+						}
+					}
+				}
+			}
+			else if (strcmp(subfield.tag, "ARID") == 0)  {
+				if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+					save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+					i =  strtol(subfield.value, (char **)0, 10);
+					subfield.value[subfield.length] = save_byte;
+					lines[num_lines].left_area = i;
+					lines[num_lines].right_area = i;
+				}
+			}
+			else if (strcmp(subfield.tag, "SADR") == 0)  {
+				/*
+				 * We assume that the X coordinate always comes first.
+				 */
+				if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "X") == 0))  {
+					/* We must add an extra endpoint, so get two storage slots instead of one. */
+					*current_point = (struct point *)malloc(sizeof(struct point));
+					if (*current_point == (struct point *)0)  {
+						fprintf(stderr, "malloc failed\n");
+						exit(0);
+					}
+					current_point2 = &((*current_point)->point);
+					*current_point2 = (struct point *)malloc(sizeof(struct point));
+					if (*current_point2 == (struct point *)0)  {
+						fprintf(stderr, "malloc failed\n");
+						exit(0);
+					}
+	
+					if (subfield.length != 4)  {
+						/* Error */
+						nodes[num_nodes].x = -1.0;
+						(*current_point)->x = -1.0;
+						(*current_point2)->x = -1.0;
+					}
+					else  {
+						i = (((long)subfield.value[3] & 0xff) << 24) |
+						    (((long)subfield.value[2] & 0xff) << 16) |
+						    (((long)subfield.value[1] & 0xff) <<  8) |
+						     ((long)subfield.value[0] & 0xff);
+						if (byte_order == 0)  {
+							nodes[num_nodes].x = (double)i * x_scale_factor + x_origin;
+							(*current_point)->x = nodes[num_nodes].x;
+							(*current_point2)->x = nodes[num_nodes].x;
+						}
+						else if (byte_order == 1)  {
+							LE_SWAB(&i);
+							nodes[num_nodes].x = (double)i * x_scale_factor + x_origin;
+							(*current_point)->x = nodes[num_nodes].x;
+							(*current_point2)->x = nodes[num_nodes].x;
+						}
+						else if (byte_order == 2)  {
+							PDP_SWAB(&i);
+							nodes[num_nodes].x = (double)i * x_scale_factor + x_origin;
+							(*current_point)->x = nodes[num_nodes].x;
+							(*current_point2)->x = nodes[num_nodes].x;
+						}
+					}
+				}
+				else if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "Y") == 0))  {
+					if (subfield.length != 4)  {
+						/* Error */
+						nodes[num_nodes].y = -1.0;
+						(*current_point)->y = -1.0;
+						(*current_point2)->y = -1.0;
+					}
+					else  {
+						i = (((long)subfield.value[3] & 0xff) << 24) |
+						    (((long)subfield.value[2] & 0xff) << 16) |
+						    (((long)subfield.value[1] & 0xff) <<  8) |
+						     ((long)subfield.value[0] & 0xff);
+						if (byte_order == 0)  {
+							nodes[num_nodes].y = (double)i * y_scale_factor + y_origin;
+							(*current_point)->y = nodes[num_nodes].y;
+							(*current_point2)->y = nodes[num_nodes].y;
+						}
+						else if (byte_order == 1)  {
+							LE_SWAB(&i);
+							nodes[num_nodes].y = (double)i * y_scale_factor + y_origin;
+							(*current_point)->y = nodes[num_nodes].y;
+							(*current_point2)->y = nodes[num_nodes].y;
+						}
+						else if (byte_order == 2)  {
+							PDP_SWAB(&i);
+							nodes[num_nodes].y = (double)i * y_scale_factor + y_origin;
+							(*current_point)->y = nodes[num_nodes].y;
+							(*current_point2)->y = nodes[num_nodes].y;
+						}
+					}
+	
+					current_point = &((*current_point2)->point);
+					count++;
+					count++;
+				}
+			}
+		}
+		if (num_nodes >= 0)  {
+			/*
+			 * If we had at least one node
+			 * then close out the attribute information of the
+			 * previous line and node and update the counts.
+			 */
+			*current_attrib = (struct attribute *)0;
+			*current_point = (struct point *)0;
+			lines[num_lines].number_attrib = attrib;
+			lines[num_lines].number_coords = count;
+			uniq_attrib(&lines[num_lines].attribute, &lines[num_lines].number_attrib);
+	
+//			*current_attrib2 = (struct attribute *)0;
+//			nodes[num_nodes].number_attrib = attrib;
+//			uniq_attrib(&nodes[num_nodes].attribute, &nodes[num_nodes].number_attrib);
+// For degenerate lines, the nodes don't appear to have the attributes attached.
+// Thus, we simply zero out the attribute count.  However, for now we will leave
+// in place the code that keeps an attribute list for the nodes, just in
+// case I am misunderstanding something.
+			nodes[num_nodes].number_attrib = 0;
+		}
+		num_lines++;
+		num_nodes++;
+		/* We are done with this file, so close it. */
+		end_ddf();
+	}
+
+
+
+	/*
+	 * Before we can process the areas, we need to read the Polygon module
+	 * and find out which attributes are associated with each polygon.
+	 * Then we can read the NA?? module and associate the Area representative
+	 * points, from the NA?? module, with specific attributes, referenced in
+	 * the polygon (PC??) module, via the polygon linkage in the NA?? module.
+	 */
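+	/*
+	 * A sketch of the linkage as the code below interprets it:  each PC??
+	 * record carries a POLY/RCID giving the polygon's Record ID, followed by
+	 * zero or more ATID fields whose MODN subfield names a four-character
+	 * attribute module (one of the attrib_files[] entries read earlier) and
+	 * whose RCID subfield indexes a record within that module.  The loop
+	 * below stores these (polygon, module, attribute record) triplets in
+	 * polygon_attrib[] so that the NA?? pass can look them up by polygon ID.
+	 */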
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'p';
+			file_name[file_name_length - 10] = 'c';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'p';
+			file_name[file_name_length -  7] = 'c';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'P';
+			file_name[file_name_length - 10] = 'C';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'P';
+			file_name[file_name_length -  7] = 'C';
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	num_polys = 0;
+	if (begin_ddf(file_name) >= 0)  {
+		/*
+		 * Loop through the subfields until we find what we want.
+		 */
+		while (get_subfield(&subfield) != 0)  {
+			if (strcmp(subfield.tag, "POLY") == 0)  {
+				if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+					save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+					current_poly =  strtol(subfield.value, (char **)0, 10);
+					subfield.value[subfield.length] = save_byte;
+				}
+			}
+			else if (strcmp(subfield.tag, "ATID") == 0)  {
+				if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+					if (subfield.length != 4)  {
+						fprintf(stderr, "Attribute module name (%.*s) is not 4 characters long.\n", subfield.length, subfield.value);
+						module_num = -1;
+						continue;
+					}
+					for (module_num = 0; module_num < num_attrib_files; module_num++)  {
+						if (strncmp(subfield.value, attrib_files[module_num].module_name, 4) == 0)  {
+							break;
+						}
+					}
+					if (module_num == num_attrib_files)  {
+						fprintf(stderr, "Warning:  Attribute module has unexpected name (%.*s).  Attributes may be in error.\n", subfield.length, subfield.value);
+						module_num = -1;
+					}
+				}
+				else if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+					if (module_num < 0)  {
+						continue;
+					}
+					if (num_polys >= MAX_POLY_NUM)  {
+						fprintf(stderr, "Ran out of polygon space.  Some attributes may not show up.\n");
+						break;
+					}
+					polygon_attrib[num_polys].poly_id = current_poly;
+					save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+					polygon_attrib[num_polys].attrib = strtol(subfield.value, (char **)0, 10);
+					subfield.value[subfield.length] = save_byte;
+					polygon_attrib[num_polys++].module_num = module_num;
+				}
+			}
+		}
+		/* We are done with this file, so close it. */
+		end_ddf();
+	}
+
+
+	/*
+	 * Now read in the Areas Module and store the data.
+	 *
+	 * Before we do, though, generate an entry for area 1.
+	 * This is the neatline polygon that surrounds the data
+	 * area.  It is not encoded in the NA??  module,
+	 * but there is an entry for the polygon in
+	 * the PC?? module.  Since we won't find it while searching
+	 * the NA?? module, and since we need it for a complete
+	 * DLG-3 file, insert it artificially at the beginning
+	 * of the array.  There does not appear to be any way
+	 * to recover the original pre-SDTS representative point for this
+	 * area, so just insert the southwest registration point,
+	 * since the few sample files I looked at seemed to use a
+	 * representative point near that corner.
+	 */
+	num_areas++;
+	areas[num_areas].id = 1;
+	areas[num_areas].x = sw_x;
+	areas[num_areas].y = sw_y;
+	areas[num_areas].number_attrib = 0;
+	areas[num_areas].attribute = (struct attribute *)0;
+
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'n';
+			file_name[file_name_length - 10] = 'a';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'n';
+			file_name[file_name_length -  7] = 'a';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'N';
+			file_name[file_name_length - 10] = 'A';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'N';
+			file_name[file_name_length -  7] = 'A';
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) >= 0)  {
+		/*
+		 * Loop through the subfields until we find what we want.
+		 */
+		attrib = -1;	// Use this convenient variable as an unrelated flag for the first trip through the loop.
+		while (get_subfield(&subfield) != 0)  {
+			if (strcmp(subfield.tag, "PNTS") == 0)  {
+				if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+					/* We are starting a new area.  Initialize what needs initializing. */
+					if (attrib >= 0)  {
+						/*
+						 * If we aren't starting the first area,
+						 * then terminate the attribute string of the
+						 * previous area and update the counts.
+						 */
+						*current_attrib = (struct attribute *)0;
+						areas[num_areas].number_attrib = attrib;
+						uniq_attrib(&areas[num_areas].attribute, &areas[num_areas].number_attrib);
+					}
+					num_areas++;
+					attrib = 0;
+					if (num_areas >= MAX_AREAS)  {
+						fprintf(stderr, "Ran out of space to store areas.  Some areas may be missing.\n");
+						break;
+					}
+					current_attrib = &areas[num_areas].attribute;
+					save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+					areas[num_areas].id = strtol(subfield.value, (char **)0, 10);
+					subfield.value[subfield.length] = save_byte;
+				}
+			}
+			else if (strcmp(subfield.tag, "SADR") == 0)  {
+				/*
+				 * We assume that the X coordinate always comes first.
+				 */
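+				/*
+				 * Descriptive note (not from the SDTS spec):  the SADR X and Y
+				 * subfields hold 32-bit integers that are converted to ground
+				 * coordinates as  value * scale_factor + origin,  after an
+				 * optional byte swap selected by byte_order (LE_SWAB or PDP_SWAB),
+				 * exactly as was done for the line coordinates above.
+				 */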
+				if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "X") == 0))  {
+					if (subfield.length != 4)  {
+						/* Error */
+						areas[num_areas].x = -1.0;
+					}
+					else  {
+						i = (((long)subfield.value[3] & 0xff) << 24) |
+						    (((long)subfield.value[2] & 0xff) << 16) |
+						    (((long)subfield.value[1] & 0xff) <<  8) |
+						     ((long)subfield.value[0] & 0xff);
+						if (byte_order == 0)  {
+							areas[num_areas].x = (double)i * x_scale_factor + x_origin;
+						}
+						else if (byte_order == 1)  {
+							LE_SWAB(&i);
+							areas[num_areas].x = (double)i * x_scale_factor + x_origin;
+						}
+						else if (byte_order == 2)  {
+							PDP_SWAB(&i);
+							areas[num_areas].x = (double)i * x_scale_factor + x_origin;
+						}
+					}
+				}
+				else if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "Y") == 0))  {
+					if (subfield.length != 4)  {
+						/* Error */
+						areas[num_areas].y = -1.0;
+					}
+					else  {
+						i = (((long)subfield.value[3] & 0xff) << 24) |
+						    (((long)subfield.value[2] & 0xff) << 16) |
+						    (((long)subfield.value[1] & 0xff) <<  8) |
+						     ((long)subfield.value[0] & 0xff);
+						if (byte_order == 0)  {
+							areas[num_areas].y = (double)i * y_scale_factor + y_origin;
+						}
+						else if (byte_order == 1)  {
+							LE_SWAB(&i);
+							areas[num_areas].y = (double)i * y_scale_factor + y_origin;
+						}
+						else if (byte_order == 2)  {
+							PDP_SWAB(&i);
+							areas[num_areas].y = (double)i * y_scale_factor + y_origin;
+						}
+					}
+				}
+			}
+			else if (strcmp(subfield.tag, "ARID") == 0)  {
+				if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+					save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+					i =  strtol(subfield.value, (char **)0, 10);
+					subfield.value[subfield.length] = save_byte;
+	
+					for (j = 0; j < num_polys; j++)  {
+						if (polygon_attrib[j].poly_id == i)  {
+							if (polygon_attrib[j].attrib <= attrib_files[polygon_attrib[j].module_num].num_attrib)  {
+								for (k = 0; k < MAX_EXTRA; k++)  {
+									if (attrib_files[polygon_attrib[j].module_num].attrib[polygon_attrib[j].attrib - 1].major[k] != 0)  {
+										*current_attrib = (struct attribute *)malloc(sizeof(struct attribute));
+										if (*current_attrib == (struct attribute *)0)  {
+											fprintf(stderr, "malloc failed\n");
+											exit(0);
+										}
+	
+										(*current_attrib)->major = attrib_files[polygon_attrib[j].module_num].attrib[polygon_attrib[j].attrib - 1].major[k];
+										(*current_attrib)->minor = attrib_files[polygon_attrib[j].module_num].attrib[polygon_attrib[j].attrib - 1].minor[k];
+	
+										current_attrib = &((*current_attrib)->attribute);
+										attrib++;
+									}
+								}
+							}
+						}
+					}
+				}
+			}
+		}
+		if (num_areas >= 0)  {
+			/*
+			 * If we had at least one area
+			 * then close out the attribute information of the
+			 * previous area and update the counts.
+			 */
+			*current_attrib = (struct attribute *)0;
+			areas[num_areas].number_attrib = attrib;
+			uniq_attrib(&areas[num_areas].attribute, &areas[num_areas].number_attrib);
+		}
+		/* We are done with this file, so close it. */
+		end_ddf();
+	}
+	num_areas++;
+
+
+
+	/*
+	 * Because the various cross-links between Nodes, Lines, and Areas depend
+	 * on the Record IDs, we are faced with a choice.  Either we make sure
+	 * that each array index corresponds to the Record ID stored therein,
+	 * or we have to search the array every time we want a specific record.
+	 * We choose to do the former, so that the Record ID stored in array
+	 * element i is equal to i + 1.
+	 *
+	 * Rather than try to fill the arrays based on record IDs, and have to
+	 * do error checking to find empty slots in the arrays, we choose to
+	 * pack records into the arrays without regard for ordering.  After the
+	 * arrays are full, we sort them into order with qsort().  This should
+	 * be reasonably low-cost, in terms of CPU time, because the arrays
+	 * should be pretty close to the correct order before we start.
+	 *
+	 * The areas array should be in order and won't need sorting.
+	 * The lines array may have some deviations from correct
+	 * ordering because the degenerate lines are stored in a separate file (the NE?? file),
+	 * and read in separately at the end of the array.
+	 *
+	 * The nodes array will not have the record-ID-to-index correspondence
+	 * because it results from reading both the NE?? module and the NO?? module,
+	 * with the corresponding records appearing in two separate groups in the
+	 * array.
+	 *
+	 * We won't sort the nodes array with qsort.  This is because the degenerate lines,
+	 * from the NE?? module, have the same Record IDs as the associated entry in
+	 * the lines array.  (In other words, in the original data, Node 42 and Line
+	 * 243 may have both represented the same degenerate line.  In the SDTS data,
+	 * both the line and the node have Record ID 243.  (I think this may be a bug
+	 * in the USGS conversion procedure, but maybe they did it on purpose.)  The
+	 * big problem with this is that we could easily have a node with the same
+	 * Record ID of 243 in the NO?? module, resulting in two nodes in the nodes array
+	 * that have the same ID.)  We will reconstruct the original node
+	 * numbers and sort the array at the same time.
+	 * We do this by assuming that holes in the NO?? node numbering should be
+	 * sequentially filled by the points that were placed into the NE?? module.
+	 */
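+	/*
+	 * A small worked example of the situation described above, with
+	 * hypothetical numbers:  suppose the NO?? module supplied nodes 1, 2, 4,
+	 * and 5, and the NE?? module supplied a degenerate line with Record ID 243.
+	 * The nodes array then holds IDs 1, 2, 4, 5, 243 in that order.  The hole
+	 * at slot 3 is later filled by the degenerate-line node, which is
+	 * renumbered to 3, and the owning line's start_node and end_node are
+	 * updated to match.
+	 */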
+	qsort(lines, num_lines, sizeof(struct lines), compare_lines);
+
+
+
+	/*
+	 * All of the useful data is parsed.
+	 * Now do something with it.
+	 *
+	 * Either enter the big block of code that writes all of the
+	 * data to an optional-format DLG file, or enter the big block
+	 * of code that writes the data to the image buffer.
+	 *
+	 * We sort the nodes array within the first big block, because
+	 * the node data isn't used when drawing a map.  If we ever
+	 * start using the node data for maps, then we will have to
+	 * move the sorting up to where we do the qsort() above.
+	 */
+	if (file_image_flag != 0)  {
+		/*
+		 * This is the big block of code that writes the data
+		 * to an optional-format DLG file.
+		 *
+		 * We now have enough information to write Record 15.
+		 */
+		sprintf(buf, "%-20.20s   0%6d%6d 010%6d%6d 010%6d%6d   1        ",
+			category_name, num_nodes, num_nodes, num_areas, num_areas, num_lines, num_lines);
+		if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+			exit(0);
+		}
+
+
+
+		/*
+		 * Now we are ready to write the node records to the file.
+		 * Begin by reconstructing the node list.
+		 * Fold all of the degenerate-line nodes into
+		 * the missing node slots in the node list.
+		 */
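+		/*
+		 * Sketch of the fold performed below:  i starts at the first
+		 * NE??-derived node (index num_NO_nodes).  Whenever slot j does not
+		 * already hold node ID j + 1, the next NE?? node is parked in the
+		 * spare slot at the end of the array, the entries from j through
+		 * i - 1 are shifted up one place, and the NE?? node is dropped into
+		 * slot j with its ID rewritten to j + 1; the owning degenerate line
+		 * has its endpoints updated to the new node number.
+		 */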
+		if (num_nodes > num_NO_nodes)  {
+			if (num_nodes >= MAX_NODES)  {	// We need one extra node slot for temporary storage
+				fprintf(stderr, "Ran out of space to store nodes.  Non-recoverable error.\n");
+				exit(0);
+			}
+			i = num_NO_nodes;
+			for (j = 0; j < num_nodes; j++)  {
+				if (nodes[j].id != (j + 1))  {
+					nodes[num_nodes].id = nodes[i].id;
+					nodes[num_nodes].x = nodes[i].x;
+					nodes[num_nodes].y = nodes[i].y;
+					nodes[num_nodes].number_attrib = nodes[i].number_attrib;
+					nodes[num_nodes].attribute = nodes[i].attribute;
+					for (k = i; k > j; k--)  {
+						nodes[k].id = nodes[k - 1].id;
+						nodes[k].x = nodes[k - 1].x;
+						nodes[k].y = nodes[k - 1].y;
+						nodes[k].number_attrib = nodes[k - 1].number_attrib;
+						nodes[k].attribute = nodes[k - 1].attribute;
+					}
+					nodes[j].id = nodes[num_nodes].id;
+					nodes[j].x = nodes[num_nodes].x;
+					nodes[j].y = nodes[num_nodes].y;
+					nodes[j].number_attrib = nodes[num_nodes].number_attrib;
+					nodes[j].attribute = nodes[num_nodes].attribute;
+					lines[nodes[j].id - 1].end_node = j + 1;
+					lines[nodes[j].id - 1].start_node = j + 1;
+					nodes[j].id = j + 1;
+					i++;
+					if (i == num_nodes)  {
+						break;
+					}
+				}
+			}
+		}
+		if (nodes[num_nodes - 1].id != num_nodes)  {
+			fprintf(stderr, "Warning:  The node section may have some problems.\n");
+		}
+		/*
+		 * Now go through the patched up nodes, and build a line list for
+		 * each node, and then print out the node record.
+		 * (The line list consists of all linear features for which this node is an endpoint.
+		 * The lines appear in no particular order.)
+		 */
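+		/*
+		 * In the list built below, a line ID is entered as a positive number
+		 * when this node is the line's start node and as a negative number
+		 * when it is the end node, so a degenerate line appears twice, once
+		 * with each sign.
+		 */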
+		for (i = 0; i < num_nodes; i++)  {
+			line_list_size = -1;
+			for (j = 0; j < num_lines; j++)  {
+				if (lines[j].start_node == nodes[i].id)  {
+					if ((line_list_size + 1) == MAX_LINE_LIST)  {
+						fprintf(stderr, "Ran out of space for a nodal line list (node %d).  Some lines are missing.\n", i + 1);
+						break;
+					}
+					line_list_size++;
+					line_list[line_list_size] = lines[j].id;
+				}
+				/* Note:  This is not an "else if" because degenerate lines must appear twice in the list. */
+				if (lines[j].end_node == nodes[i].id)  {
+					if ((line_list_size + 1) == MAX_LINE_LIST)  {
+						fprintf(stderr, "Ran out of space for a nodal line list (node %d).  Some lines are missing.\n", i + 1);
+						break;
+					}
+					line_list_size++;
+					line_list[line_list_size] = -lines[j].id;
+				}
+			}
+			line_list_size++;
+
+			/*
+			 * Print the first record of the node.
+			 */
+			sprintf(buf, "N%5d%12.2f%12.2f      %6d      %6d     0                    ",
+				i + 1, nodes[i].x, nodes[i].y, line_list_size, nodes[i].number_attrib);
+			if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+				fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+				exit(0);
+			}
+
+			/*
+			 * Print the line-list records.
+			 */
+			j = 0;
+			for (k = 0; k < line_list_size; k++)  {
+				sprintf(&buf[j], "%6d", line_list[k]);
+				j = j + 6;
+				if (j == 72)  {
+					for ( ; j < DLG_RECORD_LENGTH; j++)  {
+						buf[j] = ' ';
+					}
+					if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+						fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+						exit(0);
+					}
+					j = 0;
+				}
+			}
+			if (j > 0)  {
+				for ( ; j < DLG_RECORD_LENGTH; j++)  {
+					buf[j] = ' ';
+				}
+				if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+					fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+					exit(0);
+				}
+			}
+
+			/*
+			 * Print the attribute records.
+			 */
+			j = 0;
+			current_attrib = &nodes[i].attribute;
+			for (k = 0; k < nodes[i].number_attrib; k++)  {
+				sprintf(&buf[j], "%6d%6d", (*current_attrib)->major, (*current_attrib)->minor);
+				j = j + 12;
+				if (j == 72)  {
+					for ( ; j < DLG_RECORD_LENGTH; j++)  {
+						buf[j] = ' ';
+					}
+					if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+						fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+						exit(0);
+					}
+					j = 0;
+				}
+				current_attrib = &((*current_attrib)->attribute);
+			}
+			if (j > 0)  {
+				for ( ; j < DLG_RECORD_LENGTH; j++)  {
+					buf[j] = ' ';
+				}
+				if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+					fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+					exit(0);
+				}
+			}
+		}
+
+		/*
+		 * Now go through the areas, and build a line list for
+		 * each area, and then print out the area record.
+		 * (The line list is a list of the lines that bound this area.
+		 * The lines should appear in clockwise order around the perimeter
+		 * of the area.  If there are islands, their sublists are delimited by
+		 * inserting a line number of zero into the list ahead of them.
+		 * Island nodes are listed in counterclockwise order.)
+		 */
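+		/*
+		 * Hypothetical example of a finished list:  "3  -7  12  0  -9  5"
+		 * would describe an area bounded by lines 3, 7, and 12 (the minus sign
+		 * marks a line traversed against its stored direction), containing one
+		 * island bounded by lines 9 and 5; the zero separates the outer ring
+		 * from the island's ring.
+		 */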
+		for (i = 0; i < num_areas; i++)  {
+			line_list_size = -1;
+			for (j = 0; j < num_lines; j++)  {
+				if (lines[j].left_area == lines[j].right_area)  {
+					continue;
+				}
+				if ((lines[j].right_area == areas[i].id) || (lines[j].left_area == areas[i].id))  {
+					if ((line_list_size + 2) == MAX_LINE_LIST)  {	// Reserve an empty spot at the end of the list for later.
+						fprintf(stderr, "Ran out of space for an areal line list.  Some lines are missing for area %d.\n", i + 1);
+						break;
+					}
+					line_list_size++;
+					line_list[line_list_size] = lines[j].id;
+				}
+			}
+			line_list_size++;
+
+			/*
+			 * Now we have the line list, but it still must be sorted
+			 * so that the linear features are contiguous.
+			 *
+			 * First, though, we want to make sure that we get the
+			 * primary area first in the list, with any islands coming after it
+			 * in the list.  In order to do this, we search for the
+			 * point in the line list that has the greatest y value,
+			 * and we put that point first in the list.  (No matter what
+			 * the shape of the enclosing polygon, no island should have
+			 * any points that are as far north as the northernmost point
+			 * in the enclosing polygon.)  The sorting
+			 * algorithm will pull contiguous points toward this first point,
+			 * and thus pull the primary area toward the front of the list.
+			 */
+			y = -11000000.0;
+			for (j = 0; j < line_list_size; j++)  {
+				current_point = &lines[line_list[j] - 1].point;
+				while (*current_point != (struct point *)0)  {
+					if ((*current_point)->y > y)  {
+						y = (*current_point)->y;
+						k = j;
+					}
+					current_point = &((*current_point)->point);
+				}
+			}
+			j = line_list[k];
+			line_list[k] = line_list[0];
+			line_list[0] = j;
+			/*
+			 * We have the northernmost point first in the list.
+			 * Now do the sorting.
+			 *
+			 * Whether sorting the main polygon, or an island,
+			 * the nodes are ordered so that the referenced area
+			 * is always on the right as we traverse each linear
+			 * component of the boundary.
+			 */
+			number_of_islands = 0;
+			for (j = 0; j < line_list_size; j++)  {
+				if (lines[line_list[j] - 1].right_area == areas[i].id)  {
+					start_node = lines[line_list[j] - 1].start_node;
+					current_node = lines[line_list[j] - 1].end_node;
+				}
+				else  {
+					start_node = lines[line_list[j] - 1].end_node;
+					current_node = lines[line_list[j] - 1].start_node;
+				}
+				if (start_node != current_node)  {
+					for (k = j + 1; k < line_list_size; k++)  {
+						if (lines[line_list[k] - 1].right_area == areas[i].id)  {
+							if (lines[line_list[k] - 1].start_node == current_node)  {
+								line_list[MAX_LINE_LIST - 1] = line_list[j + 1];
+								line_list[j + 1] = line_list[k];
+								line_list[k] = line_list[MAX_LINE_LIST - 1];
+								break;
+							}
+						}
+						else  {
+							if (lines[line_list[k] - 1].end_node == current_node)  {
+								line_list[MAX_LINE_LIST - 1] = line_list[j + 1];
+								line_list[j + 1] = line_list[k];
+								line_list[k] = line_list[MAX_LINE_LIST - 1];
+								break;
+							}
+						}
+					}
+				}
+				if (lines[line_list[j] - 1].left_area == areas[i].id)  {
+					line_list[j] = - line_list[j];
+				}
+				if (((start_node == current_node) || (k == line_list_size)) && (j < (line_list_size - 1)))  {
+					if ((line_list_size + 2) == MAX_LINE_LIST)  {	// Reserve an empty spot at the end of the list for later.
+						fprintf(stderr, "Ran out of space for an areal line list.  There may be errors in the line list for area %d.\n", i + 1);
+						break;
+					}
+					j++;
+					line_list[line_list_size] = line_list[j];
+					line_list[j] = 0;
+					line_list_size++;
+					number_of_islands++;
+				}
+			}
+
+			/*
+			 * Print the first record of the area.
+			 *
+			 * Special case for the Universe polygon, which needs a single attribute
+			 * that isn't stored in the attribute list.  It isn't in the list because
+			 * we artificially generated the Universe polygon from scratch before reading
+			 * in the NA?? module.
+			 */
+			if (i == 0)  {
+				sprintf(buf, "A%5d%12.2f%12.2f      %6d     0%6d     0%6d              ",
+					i + 1, areas[i].x, areas[i].y, line_list_size, 1, number_of_islands);
+			}
+			else  {
+				sprintf(buf, "A%5d%12.2f%12.2f      %6d     0%6d     0%6d              ",
+					i + 1, areas[i].x, areas[i].y, line_list_size, areas[i].number_attrib, number_of_islands);
+			}
+			if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+				fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+				exit(0);
+			}
+
+			/*
+			 * Print the line-list records.
+			 */
+			j = 0;
+			for (k = 0; k < line_list_size; k++)  {
+				sprintf(&buf[j], "%6d", line_list[k]);
+				j = j + 6;
+				if (j == 72)  {
+					for ( ; j < DLG_RECORD_LENGTH; j++)  {
+						buf[j] = ' ';
+					}
+					if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+						fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+						exit(0);
+					}
+					j = 0;
+				}
+			}
+			if (j > 0)  {
+				for ( ; j < DLG_RECORD_LENGTH; j++)  {
+					buf[j] = ' ';
+				}
+				if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+					fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+					exit(0);
+				}
+			}
+
+			/*
+			 * Print the attribute records.
+			 */
+			j = 0;
+			current_attrib = &areas[i].attribute;
+			for (k = 0; k < areas[i].number_attrib; k++)  {
+				sprintf(&buf[j], "%6d%6d", (*current_attrib)->major, (*current_attrib)->minor);
+				j = j + 12;
+				if (j == 72)  {
+					for ( ; j < DLG_RECORD_LENGTH; j++)  {
+						buf[j] = ' ';
+					}
+					if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+						fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+						exit(0);
+					}
+					j = 0;
+				}
+				current_attrib = &((*current_attrib)->attribute);
+			}
+			if (j > 0)  {
+				for ( ; j < DLG_RECORD_LENGTH; j++)  {
+					buf[j] = ' ';
+				}
+				if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+					fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+					exit(0);
+				}
+			}
+			/*
+			 * Special case for the Universe polygon, which is
+			 * supposed to have an attribute of 0, 0.
+			 */
+			if (i == 0)  {
+				sprintf(buf, "%6d%6d", 0, 0);
+				for (j = 12 ; j < DLG_RECORD_LENGTH; j++)  {
+					buf[j] = ' ';
+				}
+				if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+					fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+					exit(0);
+				}
+			}
+		}
+
+		/*
+		 * Now go through the lines, and print out the records.
+		 */
+		for (i = 0; i < num_lines; i++)  {
+			/*
+			 * Print the first record of the line.
+			 */
+			sprintf(buf, "L%5d%6d%6d%6d%6d            %6d%6d     0                    ",
+				i + 1, lines[i].start_node, lines[i].end_node, lines[i].left_area, lines[i].right_area,
+				lines[i].number_coords, lines[i].number_attrib);
+			if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+				fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+				exit(0);
+			}
+
+			/*
+			 * Print the coordinate-list records.
+			 */
+			j = 0;
+			current_point = &lines[i].point;
+			for (k = 0; k < lines[i].number_coords; k++)  {
+				sprintf(&buf[j], "%12.2f%12.2f", (*current_point)->x, (*current_point)->y);
+				j = j + 24;
+				if (j == 72)  {
+					for ( ; j < DLG_RECORD_LENGTH; j++)  {
+						buf[j] = ' ';
+					}
+					if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+						fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+						exit(0);
+					}
+					j = 0;
+				}
+				current_point = &((*current_point)->point);
+			}
+			if (j > 0)  {
+				for ( ; j < DLG_RECORD_LENGTH; j++)  {
+					buf[j] = ' ';
+				}
+				if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+					fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+					exit(0);
+				}
+			}
+
+			/*
+			 * Print the attribute records.
+			 */
+			j = 0;
+			current_attrib = &lines[i].attribute;
+			for (k = 0; k < lines[i].number_attrib; k++)  {
+				sprintf(&buf[j], "%6d%6d", (*current_attrib)->major, (*current_attrib)->minor);
+				j = j + 12;
+				if (j == 72)  {
+					for ( ; j < DLG_RECORD_LENGTH; j++)  {
+						buf[j] = ' ';
+					}
+					if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+						fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+						exit(0);
+					}
+					j = 0;
+				}
+				current_attrib = &((*current_attrib)->attribute);
+			}
+			if (j > 0)  {
+				for ( ; j < DLG_RECORD_LENGTH; j++)  {
+					buf[j] = ' ';
+				}
+				if (write(output_fdesc, buf, DLG_RECORD_LENGTH) != DLG_RECORD_LENGTH)  {
+					fprintf(stderr, "Failed to write output file.  errno = %d.\n", errno);
+					exit(0);
+				}
+			}
+		}
+		close(output_fdesc);
+	}
+	else  {
+		/*
+		 * This is the big block of code that writes the data
+		 * to the drawmap image buffer.
+		 *
+		 * First find the x and y image coordinates that border this DLG chunk.
+		 *
+		 * Then draw the lines for which we have appropriate attribute codes stored,
+		 * but don't go outside the x-y border.
+		 *
+		 * Then fill in all of the areas for which we have
+		 * appropriate attribute codes stored, but don't go outside
+		 * the x-y border.
+		 */
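+		/*
+		 * The conversions below linearly map longitude onto image columns and
+		 * latitude onto image rows (row 0 is at the top, hence the
+		 * image_corners->y - 1 - ... form).  For instance, a chunk corner
+		 * halfway between sw_long and ne_long lands roughly in the middle
+		 * column of the image; the clamps that follow keep the border within
+		 * one pixel of the image edges.
+		 */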
+		dlg_x_low = -1 + round((long_sw - image_corners->sw_long) * (double)image_corners->x / (image_corners->ne_long - image_corners->sw_long));
+		dlg_y_low = image_corners->y - 1 - round((lat_ne - image_corners->sw_lat) * (double)image_corners->y / (image_corners->ne_lat - image_corners->sw_lat));
+		dlg_x_high = -1 + round((long_ne - image_corners->sw_long) * (double)image_corners->x / (image_corners->ne_long - image_corners->sw_long));
+		dlg_y_high = image_corners->y - 1 - round((lat_sw - image_corners->sw_lat) * (double)image_corners->y / (image_corners->ne_lat - image_corners->sw_lat));
+		if (dlg_x_low < -1)  {
+			dlg_x_low = -1;
+		}
+		if (dlg_y_low < -1)  {
+			dlg_y_low = -1;
+		}
+		if (dlg_x_high >= image_corners->x)  {
+			dlg_x_high = image_corners->x - 1;
+		}
+		if (dlg_y_high >= image_corners->y)  {
+			dlg_y_high = image_corners->y - 1;
+		}
+
+		/*
+		 * Cycle through all of the line data and draw all of the appropriate lines
+		 * onto the image (overlaying any previous data).
+		 */
+		for (i = 0; i < num_lines; i++)  {
+			/*
+			 * In the DLG-3 format, the first area element listed
+			 * represents the universe outside of the map area.
+			 * (In SDTS, this apparently is no longer encoded as an area,
+			 * but is stored in connection with the definition of the data boundaries.)
+			 * Thus, lines that have area 1 as a boundary should be
+			 * "neatlines" that bound the map area.
+			 * Since these clutter up a map, we normally discard them.
+			 * (If you want to keep them, then change the #define of OMIT_NEATLINES
+			 * so that it is zero, rather than non-zero.)
+			 *
+			 * Here are relevant quotes from the DLG-3 guide:
+			 *
+			 *	expressed by network data is that of connectivity.  The network case
+			 *	differs from the area case in that, irrespective of the number of closed
+			 *	areas forming the graph, only two areas are encoded:  (1) the area out-
+			 *	side the graph, termed the outside area; and (2) the area within the
+			 *	graph, termed the background area.  All lines except the graph boundary,
+			 *	or neatline, are considered to be contained within the background area.
+			 *
+			 *	map border.  There is one outside area for each DLG-3. It is always the
+			 *	first area encountered (its ID is 1) and has the attribute code 000 0000.
+			 */
+
+			/*
+			 * If the user provided a file full of attributes, then
+			 * use them to control whether or not the lines are drawn.
+			 * If not, then just go ahead and draw everything.
+			 *
+			 * Note:  If a major or minor attribute code in the attribute
+			 *        file (supplied by the user) is less than
+			 *        zero, it is treated as a wild card and matches
+			 *        anything.
+			 */
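+			/*
+			 * Illustrative reading of the test below:  an attributes_L[] entry
+			 * of {50, -1} would match every Line whose major code is 50,
+			 * regardless of minor code, and an entry of {-1, -1} would match
+			 * every attributed Line in the file.
+			 */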
+			if ((num_A_attrib > 0) || (num_L_attrib > 0))  {
+				if ((OMIT_NEATLINES == 0) || ((lines[i].left_area != 1) && (lines[i].right_area != 1)))  {
+					current_attrib = &lines[i].attribute;
+					if (*current_attrib != (struct attribute *)0)  {
+						while (*current_attrib != (struct attribute *)0)  {
+							for (j = 0; j < num_L_attrib; j++)  {
+								if (((attributes_L[j].major < 0) ||
+								     (attributes_L[j].major == ((*current_attrib)->major))) &&
+								    ((attributes_L[j].minor < 0) ||
+								     (attributes_L[j].minor == ((*current_attrib)->minor))))  {
+									draw_lines(&datum, lines[i].point, color, image_corners);
+									goto FIN1;
+								}
+							}
+							current_attrib = &((*current_attrib)->attribute);
+						}
+					}
+					else  {
+						/*
+						 * If the feature had no attribute codes, then check if
+						 * it is covered by a wild card in the attributes file.
+						 */
+						for (j = 0; j < num_L_attrib; j++)  {
+							if (((attributes_L[j].major < 0) ||
+							     (attributes_L[j].major == data_type)) &&
+							    (attributes_L[j].minor < 0))  {
+								draw_lines(&datum, lines[i].point, color, image_corners);
+								goto FIN1;
+							}
+						}
+					}
+				}
+
+				/*
+				 * For those (hopefully rare) occasions in which something
+				 * goes wrong, we provide the capability for a user to
+				 * specifically request a single line from a DLG file so that
+				 * the cause of the problem can be isolated.
+				 * The user specifies a specific line by providing a major
+				 * attribute number of 10000, and a minor attribute number
+				 * equal to the desired line ID number.  Since no
+				 * valid attribute (as far as I know) is ever as large as
+				 * 10,000, such user-specified attribute pairs will not
+				 * affect the search for legitimate attributes above (since
+				 * they can't possibly match anything).  If we reach this point,
+				 * then we failed to draw a line due to the legitimate-attribute
+				 * checks above; so we give it one more try here, based on
+				 * user-requested ID numbers.
+				 *
+				 * Note:  If you are using this feature, then it doesn't make
+				 *        a lot of sense to process more than one DLG file,
+				 *        since the ID number you give (as the minor attribute)
+				 *        will be matched in every DLG file that has a
+				 *        Line with that ID.  If you are trying to isolate
+				 *        one (or a few) Line(s), then you probably want to
+				 *        be certain which file is the source of the data.
+				 */
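+				/*
+				 * For example, an attribute pair of 10000/243 in the user's
+				 * attribute file would cause only the Line whose ID is 243 to be
+				 * drawn by the check below (assuming it wasn't already drawn by
+				 * an ordinary attribute match above).
+				 */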
+				for (j = 0; j < num_L_attrib; j++)  {
+					if ((attributes_L[j].major == 10000) &&
+					     (attributes_L[j].minor == lines[i].id))  {
+						draw_lines(&datum, lines[i].point, color, image_corners);
+						goto FIN1;
+					}
+				}
+			}
+			else  {
+				if ((OMIT_NEATLINES == 0) || ((lines[i].left_area != 1) && (lines[i].right_area != 1)))  {
+					draw_lines(&datum, lines[i].point, color, image_corners);
+				}
+			}
+FIN1:
+			{;}
+		}
+
+		/*
+		 * Now we fill in each interesting area on the map with the
+		 * same color that bounds the area.  (For example,
+		 * lakes (attribute code 050 0421) might be filled in.)
+		 * However, sometimes areas might be filled in improperly.
+		 * The code assumes that the reference point for an area falls
+		 * within the polygon of lines that define that area.
+		 * According to the DLG guide, this isn't guaranteed
+		 * to always be the case, but the assumption has nonetheless
+		 * worked reasonably well in practice.
+		 *
+		 * Area attributes are processed a bit differently than the
+		 * attributes for lines:  no areas are filled in automatically.
+		 * If the user did not specify any Area attributes in the attribute
+		 * file, then no areas are filled in.  This is because the area-fill
+		 * algorithm can occasionally run amok, and therefore the appropriate
+		 * default is to not give it a chance.  For extensive details on the
+		 * area-fill algorithm, see the comments at the top of fill_area().
+		 */
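+		/*
+		 * For instance, an attribute-file entry with major 50 and minor 421
+		 * (the lake code mentioned above) would cause each matching area to be
+		 * flood-filled, starting from its representative point, in the same
+		 * color used for its boundary lines.
+		 */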
+		if (num_A_attrib > 0)  {
+			for (i = 0; i < num_areas; i++)  {
+				if (areas[i].number_attrib <= 0)  {
+					continue;
+				}
+
+				current_attrib = &areas[i].attribute;
+				while (*current_attrib != (struct attribute *)0)  {
+					for (j = 0; j < num_A_attrib; j++)  {
+						if (((attributes_A[j].major < 0) ||
+						     (attributes_A[j].major == ((*current_attrib)->major))) &&
+						    ((attributes_A[j].minor < 0) ||
+						     (attributes_A[j].minor == ((*current_attrib)->minor))))  {
+							fill_area(&datum, areas[i].x, areas[i].y, color, image_corners);
+							goto FIN2;
+						}
+					}
+					current_attrib = &((*current_attrib)->attribute);
+				}
+
+				/*
+				 * As with the Line attributes, we provide an interface
+				 * for the user to select specific areas, via their IDs.
+				 */
+				for (j = 0; j < num_A_attrib; j++)  {
+					if ((attributes_A[j].major == 10000) &&
+					     (attributes_A[j].minor == areas[i].id))  {
+						fill_area(&datum, areas[i].x, areas[i].y, color, image_corners);
+						goto FIN2;
+					}
+				}
+FIN2:
+				{;}
+			}
+		}
+	}
+
+
+
+	/* Free up all of the malloc() memory */
+	for (i = 0; i < num_lines; i++)  {
+		if (lines[i].number_coords > 0)  {
+			current_point = &lines[i].point;
+
+			while (*current_point != (struct point *)0)  {
+				tmp_point = (*current_point)->point;
+				free(*current_point);
+				*current_point = tmp_point;
+			}
+		}
+		if (lines[i].number_attrib > 0)  {
+			current_attrib = &lines[i].attribute;
+
+			while (*current_attrib != (struct attribute *)0)  {
+				tmp_attrib = (*current_attrib)->attribute;
+				free(*current_attrib);
+				*current_attrib = tmp_attrib;
+			}
+		}
+	}
+	for (i = 0; i < num_areas; i++)  {
+		if (areas[i].number_attrib > 0)  {
+			current_attrib = &areas[i].attribute;
+
+			while (*current_attrib != (struct attribute *)0)  {
+				tmp_attrib = (*current_attrib)->attribute;
+				free(*current_attrib);
+				*current_attrib = tmp_attrib;
+			}
+		}
+	}
+	for (i = 0; i < num_nodes; i++)  {
+		if (nodes[i].number_attrib > 0)  {
+			current_attrib = &nodes[i].attribute;
+
+			while (*current_attrib != (struct attribute *)0)  {
+				tmp_attrib = (*current_attrib)->attribute;
+				free(*current_attrib);
+				*current_attrib = tmp_attrib;
+			}
+		}
+	}
+	for (i = 0; i < num_attrib_files; i++)  {
+		free(attrib_files[i].attrib);
+	}
+}
+
+
+
+
+
+/*
+ * Feature definitions for the additional
+ * features associated with primary attributes.
+ */
+#define NUM_FEATURES 117
+struct	features  {
+	long key;		// An internal key used to keep track of the entries
+	long main_major;	// The major attribute number for this category
+	long major;		// The major attribute number for this category, modified to suit individual features
+	long minor;		// The minor attribute number for this feature
+	char *feature_name;
+	long feature_name_length;
+} features[NUM_FEATURES] =  {
+  0,	 20,	 20,	202,	"SUPPLEMENTARY",	13,	// Hypsography
+  1,	 20,	 20,	204,	"AMENDED",		 7,
+  2,	 20,	 20,	610,	"APPROXIMATE",		11,
+  3,	 20,	 20,	611,	"DEPRESSION",		10,
+  4,	 20,	 20,	612,	"GLACIER_OR_SNOW",	15,
+  5,	 20,	 20,	613,	"UNDERWATER",		10,
+  6,	 20,	 20,	614,	"BEST_ESTIMATE",	13,
+  7,	 20,	 26,	 -1,	"SPOT_CATEGORY",	13,
+  8,	 20,	 26,	  0,	"PHOTOREVISED",		12,
+  9,	 50,	 50,	  0,	"PHOTOREVISED",		12,	// Hydrography
+ 10,	 50,	 50,	 -1,	"RELATION_TO_GROUND",	18,
+ 11,	 50,	 50,	 -1,	"VERTICAL_RELATION",	17,
+ 12,	 50,	 50,	 -1,	"BANK",			 4,
+ 13,	 50,	 50,	 -1,	"OPERATIONAL_STATUS",	18,
+ 14,	 50,	 50,	608,	"SALT",			 4,
+ 15,	 50,	 50,	609,	"UNSURVEYED",		10,
+ 16,	 50,	 50,	610,	"INTERMITTENT",		12,
+ 17,	 50,	 50,	612,	"SUBMERGED",		 9,
+ 18,	 50,	 50,	614,	"DRY",			 3,
+ 19,	 50,	 50,	615,	"MINERAL_OR_HOT",	14,
+ 20,	 50,	 50,	616,	"NAVIGABLE",		 9,
+ 21,	 50,	 50,	618,	"EARTHEN",		 7,
+ 22,	 50,	 50,	619,	"INTERPOLATED",		12,
+ 23,	 50,	 -1,	 -1,	"ELEVATION",		 9,
+ 24,	 50,	 53,	 -1,	"ROTATION_ANGLE",	14,
+ 25,	 50,	 55,	 -1,	"RIVER_MILE",		10,
+ 26,	 50,	 58,	  0,	"BEST_ESTIMATE",	13,
+ 27,	 70,	 78,	  0,	"BEST_ESTIMATE",	13,	// Vegetative Surface Cover
+ 28,	 80,	 80,	  0,	"PHOTOREVISED",		12,	// Non-Vegetative Features
+ 29,	 80,	 88,	  0,	"BEST_ESTIMATE",	13,
+ 30,	 90,	 90,	100,	"CIVIL_TOWNSHIP",	14,	// Boundaries
+ 31,	 90,	 90,	101,	"CITY",			 4,
+ 32,	 90,	 90,	104,	"NATIONAL_FOREST",	15,
+ 33,	 90,	 90,	106,	"WILDERNESS_AREA",	15,
+ 34,	 90,	 90,	135,	"AHUPUAA",		 7,
+ 35,	 90,	 90,	136,	"HAWAIIAN_HOMESTEAD",	18,
+ 36,	 90,	 90,	401,	"FEDERALLY_ADMIN",	15,
+ 37,	 90,	 90,	601,	"IN_DISPUTE",		10,
+ 38,	 90,	 91,	 -1,	"STATE",		 5,
+ 39,	 90,	 92,	 -1,	"COUNTY",		 6,
+ 40,	 90,	 -1,	 -1,	"TOWNSHIP_CODE",	13,
+ 41,	 90,	 90,	  0,	"PHOTOREVISED",		12,
+ 42,	 90,	 -1,	 -1,	"MONUMENT_NUMBER",	15,
+ 43,	 90,	 98,	  0,	"BEST_ESTIMATE",	13,
+ 44,	150,	151,	 -1,	"STATE",		 5,	// Survey Control
+ 45,	150,	152,	 -1,	"COUNTY",		 6,
+ 46,	150,	 -1,	 -1,	"ELEVATION",		 9,
+ 47,	170,	170,	216,	"ARBITRARY_EXT",	13,	// Roads and Trails
+ 48,	170,	170,	 -1,	"RELATION_TO_GROUND",	18,
+ 49,	170,	170,	 -1,	"VERTICAL_RELATION",	17,
+ 50,	170,	170,	 -1,	"OPERATIONAL_STATUS",	18,
+ 51,	170,	170,	 -1,	"ACCESS_RESTRICTION",	18,
+ 52,	170,	170,	605,	"OLD_RAILROAD_GRADE",	18,
+ 53,	170,	170,	623,	"WITH_RAILROAD",	13,
+ 54,	170,	170,	624,	"COVERED",		 7,
+ 55,	170,	170,	600,	"HISTORICAL",		10,
+ 56,	170,	170,	608,	"LIMITED_ACCESS",	14,
+ 57,	170,	170,	  0,	"PHOTOREVISED",		12,
+ 58,	170,	171,	 -1,	"LANES",		 5,
+ 59,	170,	170,	 -1,	"ROAD_WIDTH",		10,
+ 60,	170,	178,	  0,	"BEST_ESTIMATE",	13,
+ 61,	180,	180,	 -1,	"RELATION_TO_GROUND",	18,	// Railroads
+ 62,	180,	180,	 -1,	"VERTICAL_RELATION",	17,
+ 63,	180,	180,	 -1,	"OPERATIONAL_STATUS",	18,
+ 64,	180,	180,	 -1,	"ACCESS_RESTRICTIONS",	19,
+ 65,	180,	180,	606,	"NARROW_GAUGE",		12,
+ 66,	180,	180,	607,	"IN_SNOWSHED",		11,
+ 67,	180,	180,	610,	"RAPID_TRANSIT",	13,
+ 68,	180,	180,	614,	"JUXTAPOSITION",	13,
+ 69,	180,	180,	210,	"ARBITRARY_EXT",	13,
+ 70,	180,	180,	600,	"HISTORICAL",		10,
+ 71,	180,	180,	  0,	"PHOTOREVISED",		12,
+ 72,	180,	181,	 -1,	"TRACKS",		 6,
+ 73,	180,	183,	 -1,	"ROTATION_ANGLE",	14,
+ 74,	180,	188,	  0,	"BEST_ESTIMATE",	13,
+ 75,	190,	190,	 -1,	"RELATION_TO_GROUND",	18,	// Pipelines
+ 76,	190,	190,	 -1,	"OPERATIONAL_STATUS",	18,
+ 77,	190,	190,	605,	"UNIMPROVED",		10,
+ 78,	190,	190,	607,	"NUCLEAR",		 7,
+ 79,	190,	190,	205,	"ARBITRARY_EXT",	13,
+ 80,	190,	190,	  0,	"PHOTOREVISED",		12,
+ 81,	190,	193,	 -1,	"ROTATION_ANGLE",	14,
+ 82,	190,	198,	  0,	"BEST_ESTIMATE",	13,
+ 83,	190,	196,	 -1,	"STATE",		 5,
+ 84,	190,	197,	 -1,	"AIRPORT",		 7,
+ 85,	200,	200,	 -1,	"RELATION_TO_GROUND",	18,	// Manmade Features
+ 86,	200,	200,	 -1,	"OPERATIONAL_STATUS",	18,
+ 87,	200,	200,	 -1,	"PRODUCT",		 7,
+ 88,	200,	200,	608,	"COVERED",		 7,
+ 89,	200,	200,	 -1,	"TOWER_TYPE",		10,
+ 90,	200,	200,	615,	"UNINCORPORATED",	14,
+ 91,	200,	200,	616,	"NO_POPULATION",	13,
+ 92,	200,	200,	690,	"NATIONAL_CAPITAL",	16,
+ 93,	200,	200,	691,	"STATE_CAPITAL",	13,
+ 94,	200,	200,	692,	"COUNTY_SEAT",		11,
+ 95,	200,	200,	 -1,	"POPULATION_CLASS",	16,
+ 96,	200,	200,	  0,	"PHOTOREVISED",		12,
+ 97,	200,	202,	 -1,	"WIDTH",		 5,
+ 98,	200,	203,	 -1,	"ROTATION_ANGLE",	14,
+ 99,	200,	208,	  0,	"BEST_ESTIMATE",	13,
+100,	200,	206,	 -1,	"STATE",		 5,
+101,	200,	207,	 -1,	"POPULATED_PLACE",	15,
+102,	300,	300,	 40,	"ID_IN_FIELD",		11,	// Public Land Survey
+103,	300,	300,	 41,	"WITH_HORIZONTAL",	15,
+104,	300,	300,	 42,	"WITH_ELEVATION",	14,
+105,	300,	300,	201,	"APPROXIMATE_POS",	15,
+106,	300,	300,	202,	"PROTRACTED_POS",	14,
+107,	300,	306,	 -1,	"ORIGIN_OF_SURVEY",	16,
+108,	300,	 -1,	 -1,	"TOWNSHIP",		 8,
+109,	300,	 -1,	 -1,	"RANGE",		 5,
+110,	300,	301,	 -1,	"SECTION",		 7,
+111,	300,	307,	 -1,	"LAND_GRANT",		10,
+112,	300,	 -1,	 -1,	"MONUMENT_NUMBER",	15,
+113,	300,	308,	  0,	"BEST_ESTIMATE",	13,
+114,	300,	306,	 -1,	"OHIO_NAMED_SURVEY",	17,
+115,	300,	300,	612,	"REFUGEE_LANDS",	13,
+116,	190,	190,	605,	"UNPAVED",		 7,
+};
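+/*
+ * How the table above is used (a descriptive note):  get_extra_attrib() looks
+ * for an entry whose main_major matches the record's category and whose
+ * feature_name matches the subfield label, then emits that entry's major/minor
+ * pair (or a value derived from the subfield contents) as an extra attribute.
+ * For example, a Hypsography record whose DEPRESSION subfield is 'Y' picks up
+ * the attribute pair 20/611 from the entry with key 3.
+ */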
+
+
+
+
+
+/*
+ * A primary attribute may have additional attribute characteristics
+ * associated with it.
+ * Process this additional attribute information.
+ */
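+/*
+ * Return-value convention, as implemented below:  0 means an attribute pair was
+ * stored through the major/minor (and possibly major2/minor2) pointers, 1 means
+ * the subfield carried nothing usable and can simply be skipped, and -1 means
+ * the subfield was empty.
+ */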
+long
+get_extra_attrib(long category_major, long *major, long *minor, long *major2, long *minor2, struct subfield *subfield)
+{
+	long i, j;
+	double f;
+	char save_byte;
+	char *end_ptr;
+
+
+	/*
+	 * Do some preliminary checks to avoid a lot of
+	 * unnecessary processing.
+	 */
+	if (subfield->length <= 0)  {
+		return -1;
+	}
+	if ((subfield->length == 1)  && (subfield->value[0] == ' '))  {
+		return 1;
+	}
+	if ((subfield->length == 2) && (subfield->value[0] == ' ') && (subfield->value[1] == ' '))  {
+		return 1;
+	}
+
+	for (i = 0; i < NUM_FEATURES; i++)  {
+		if (features[i].main_major == category_major)  {
+			if (strncmp(subfield->label, features[i].feature_name, features[i].feature_name_length) == 0)  {
+				break;
+			}
+		}
+	}
+	if (i == NUM_FEATURES)  {
+		fprintf(stderr, "Couldn't find attribute feature name (%s) for major %ld.  Attribute feature ignored.\n", subfield->label, category_major);
+		return 1;
+	}
+
+
+
+	switch (category_major)  {
+	case HYPSOGRAPHY:
+		switch (features[i].key)  {
+		case 0:
+		case 1:
+		case 2:
+		case 3:
+		case 4:
+		case 5:
+		case 6:
+		case 8:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+		case 7:
+			if ((subfield->length != 2) || ((subfield->value[0] == ' ') && (subfield->value[1] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		default:
+			break;
+		}
+		break;
+
+	case HYDROGRAPHY:
+		switch (features[i].key)  {
+		case  9:
+	 	case 14:
+	 	case 15:
+	 	case 16:
+	 	case 17:
+	 	case 18:
+	 	case 19:
+	 	case 20:
+	 	case 21:
+	 	case 22:
+	 	case 26:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+	 	case 10:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'U')  {
+				*minor = 601;
+			}
+	 		else if (subfield->value[0] == 'E')  {
+				*minor = 603;
+			}
+	 		else if (subfield->value[0] == 'T')  {
+				*minor = 604;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 11:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'O')  {
+				*minor = 602;
+			}
+	 		else if (subfield->value[0] == 'U')  {
+				*minor = 617;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 12:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'R')  {
+				*minor = 605;
+			}
+	 		else if (subfield->value[0] == 'L')  {
+				*minor = 606;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 13:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'U')  {
+				*minor = 607;
+			}
+	 		else if (subfield->value[0] == 'A')  {
+				*minor = 611;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 23:
+			if (strncmp(subfield->value, "-9999.99", 8) == 0)  {
+				return 1;	// -9999.99 indicates unused
+			}
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			f =  strtod(subfield->value, (char **)0);
+			subfield->value[subfield->length] = save_byte;
+			if (f < 0.0)  {
+				*major = 57;
+				*minor = -round(f);
+			}
+			else  {
+				*major = 52;
+				*minor = round(f);
+			}
+			return 0;
+	 	case 24:
+			if (strncmp(subfield->value, "-99", 3) == 0)  {
+				return 1;	// -99 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+	 	case 25:
+			if (strncmp(subfield->value, "-999.99", 7) == 0)  {
+				return 1;	// -999.99 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			f =  strtod(subfield->value, (char **)0);
+			subfield->value[subfield->length] = save_byte;
+			*minor = round(f);
+			return 0;
+		default:
+			break;
+		};
+		break;
+
+	case VEG_SURFACE_COVER:
+		switch (features[i].key)  {
+ 		case 27:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+		default:
+			break;
+		};
+		break;
+
+	case NON_VEG_FEATURES:
+		switch (features[i].key)  {
+ 		case 28:
+ 		case 29:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+		default:
+			break;
+		};
+		break;
+
+	case BOUNDARIES:
+		switch (features[i].key)  {
+ 		case 30:
+ 		case 31:
+ 		case 32:
+ 		case 33:
+ 		case 34:
+ 		case 35:
+ 		case 36:
+ 		case 37:
+ 		case 41:
+ 		case 43:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+		case 38:
+			if ((subfield->length != 2) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 39:
+			if ((subfield->length != 3) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 40:
+			if ((subfield->length != 5) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ') &&
+							(subfield->value[4] == ' ')))  {
+				return 1;
+			}
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			*major = 93;	// Carries first two digits of 5-digit code
+			*major2 = 94;	// Carries last three digits of 5-digit code
+			*minor2 = *minor % 1000;
+			*minor = *minor / 1000;
+			return 0;
+		case 42:
+			if ((subfield->length != 8) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ') &&
+							(subfield->value[4] == ' ') &&
+							(subfield->value[5] == ' ') &&
+							(subfield->value[6] == ' ') &&
+							(subfield->value[7] == ' ')))  {
+				return 1;
+			}
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			*major = 95;
+			/*
+			 * So far, when I check the original DLG-3 (non-SDTS) source material,
+			 * the monument number attributes aren't included.  In other words, if I comment out
+			 * this whole case, the resulting SDTS-to-DLG conversion will match the
+			 * original.  If I leave the case uncommented, the conversion won't match
+			 * the original.
+			 *
+			 * Note that there is also a major code 96, for the alphabetic portion of
+			 * a monument number.  I'm not sure how this is handled, since I've yet
+			 * to find an example.
+			 */
+			return 0;
+		default:
+			break;
+		};
+		break;
+
+	case SURVEY_CONTROL:
+		switch (features[i].key)  {
+		case 44:
+			if ((subfield->length != 2) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 45:
+			if ((subfield->length != 3) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+	 	case 46:
+			/*
+			 * The spec says 9999.99 means not applicable.
+			 * This is at odds with other categories of data, which use -9999.99.
+			 * Assume that the spec is wrong, since it wouldn't make much sense to
+			 * restrict ourselves to elevations that don't equal 9999.99.
+			 */
+//			if (strncmp(subfield->value, "9999.99", 7) == 0)  {
+//				return 1;	// 9999.99 indicates unused
+//			}
+			if (strncmp(subfield->value, "-9999.99", 8) == 0)  {
+				return 1;	// -9999.99 indicates unused
+			}
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			f =  strtod(subfield->value, (char **)0);
+			subfield->value[subfield->length] = save_byte;
+			if (f < 0.0)  {
+				*major = 157;
+				*minor = -round(f);
+			}
+			else  {
+				*major = 154;
+				*minor = round(f);
+			}
+			return 0;
+		default:
+			break;
+		};
+		break;
+
+	case ROADS_AND_TRAILS:
+		switch (features[i].key)  {
+ 		case 47:
+ 		case 52:
+ 		case 53:
+ 		case 54:
+ 		case 55:
+ 		case 56:
+ 		case 57:
+ 		case 60:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+		case 48:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'T')  {
+				*minor = 601;
+			}
+	 		else if (subfield->value[0] == 'S')  {
+				*minor = 606;
+			}
+	 		else if (subfield->value[0] == 'D')  {
+				*minor = 612;
+			}
+	 		else if (subfield->value[0] == 'E')  {
+				*minor = 614;
+			}
+	 		else if (subfield->value[0] == 'R')  {
+				*minor = 618;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 49:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'O')  {
+				*minor = 602;
+			}
+	 		else if (subfield->value[0] == 'U')  {
+				*minor = 607;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 50:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'U')  {
+				*minor = 603;
+			}
+	 		else if (subfield->value[0] == 'X')  {
+				*minor = 604;
+			}
+	 		else if (subfield->value[0] == 'P')  {
+				*minor = 611;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 51:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'T')  {
+				*minor = 609;
+			}
+	 		else if (subfield->value[0] == 'P')  {
+				*minor = 610;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+		case 58:
+			/*
+			 * The spec says this field is all blanks if unused.
+			 * However, sample files have it as "-9".
+			 * Check it both ways.
+			 */
+			if ((subfield->length != 2) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ')))  {
+				return 1;
+			}
+			if (strncmp(subfield->value, "-9", 2) == 0)  {
+				return 1;	// -9 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+	 	case 59:
+			if (strncmp(subfield->value, "-99", 3) == 0)  {
+				return 1;	// -99 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor = 600 + strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		default:
+			break;
+		};
+		break;
+
+	case RAILROADS:
+		switch (features[i].key)  {
+ 		case 65:
+ 		case 66:
+ 		case 67:
+ 		case 68:
+ 		case 69:
+ 		case 70:
+ 		case 71:
+ 		case 74:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+		case 61:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'T')  {
+				*minor = 601;
+			}
+	 		else if (subfield->value[0] == 'E')  {
+				*minor = 609;
+			}
+	 		else if (subfield->value[0] == 'R')  {
+				*minor = 611;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 62:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'O')  {
+				*minor = 602;
+			}
+	 		else if (subfield->value[0] == 'U')  {
+				*minor = 605;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 63:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'A')  {
+				*minor = 603;
+			}
+	 		else if (subfield->value[0] == 'D')  {
+				*minor = 604;
+			}
+	 		else if (subfield->value[0] == 'U')  {
+				*minor = 608;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 64:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'P')  {
+				*minor = 612;
+			}
+	 		else if (subfield->value[0] == 'G')  {
+				*minor = 613;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+		case 72:
+			if (strncmp(subfield->value, "-9", 2) == 0)  {
+				return 1;	// -9 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+	 	case 73:
+			if (strncmp(subfield->value, "-99", 3) == 0)  {
+				return 1;	// -99 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor = strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		default:
+			break;
+		};
+		break;
+
+	case PIPE_TRANS_LINES:
+		switch (features[i].key)  {
+ 		case 77:
+ 		case 78:
+ 		case 79:
+ 		case 80:
+ 		case 82:
+ 		case 116:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+	 	case 75:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'U')  {
+				*minor = 600;
+			}
+	 		else if (subfield->value[0] == 'A')  {
+				*minor = 603;
+			}
+	 		else if (subfield->value[0] == 'S')  {
+				*minor = 606;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 76:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'U')  {
+				*minor = 601;
+			}
+	 		else if (subfield->value[0] == 'A')  {
+				*minor = 602;
+			}
+	 		else if (subfield->value[0] == 'C')  {
+				*minor = 604;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 81:
+			if (strncmp(subfield->value, "-99", 3) == 0)  {
+				return 1;	// -99 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor = strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 83:
+			if ((subfield->length != 2) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 84:
+			if ((subfield->length != 4) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		default:
+			break;
+		};
+		break;
+
+	case MANMADE_FEATURES:
+		switch (features[i].key)  {
+ 		case 88:
+ 		case 90:
+ 		case 91:
+ 		case 92:
+ 		case 93:
+ 		case 94:
+ 		case 96:
+ 		case 99:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+	 	case 85:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'U')  {
+				*minor = 601;
+			}
+	 		else if (subfield->value[0] == 'S')  {
+				*minor = 617;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 86:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'C')  {
+				*minor = 602;
+			}
+	 		else if (subfield->value[0] == 'A')  {
+				*minor = 603;
+			}
+	 		else if (subfield->value[0] == 'R')  {
+				*minor = 618;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 87:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'W')  {
+				*minor = 604;
+			}
+	 		else if (subfield->value[0] == 'O')  {
+				*minor = 605;
+			}
+	 		else if (subfield->value[0] == 'G')  {
+				*minor = 606;
+			}
+	 		else if (subfield->value[0] == 'C')  {
+				*minor = 607;
+			}
+	 		else if (subfield->value[0] == 'V')  {
+				*minor = 609;
+			}
+	 		else if (subfield->value[0] == 'S')  {
+				*minor = 610;
+			}
+	 		else if (subfield->value[0] == 'L')  {
+				*minor = 611;
+			}
+	 		else if (subfield->value[0] == 'B')  {
+				*minor = 612;
+			}
+	 		else if (subfield->value[0] == 'A')  {
+				*minor = 619;
+			}
+	 		else if (subfield->value[0] == 'H')  {
+				*minor = 620;
+			}
+	 		else if (subfield->value[0] == 'I')  {
+				*minor = 621;
+			}
+	 		else if (subfield->value[0] == 'P')  {
+				*minor = 622;
+			}
+	 		else if (subfield->value[0] == 'E')  {
+				*minor = 623;
+			}
+	 		else if (subfield->value[0] == 'R')  {
+				*minor = 624;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+	 	case 89:
+			*major = features[i].major;
+	 		if (subfield->value[0] == 'R')  {
+				*minor = 613;
+			}
+	 		else if (subfield->value[0] == 'L')  {
+				*minor = 614;
+			}
+			else  {
+				return 1;
+			}
+			return 0;
+		case 95:
+			if (strncmp(subfield->value, "-9", 2) == 0)  {
+				return 1;	// -9 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor = 680 + strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+	 	case 97:
+			if (strncmp(subfield->value, "-999", 4) == 0)  {
+				return 1;	// -999 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+	 	case 98:
+			if (strncmp(subfield->value, "-99", 3) == 0)  {
+				return 1;	// -99 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor = strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 100:
+			// Only for 2M-scale DLGs.
+			if ((subfield->length != 2) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 101:
+			// Not sure this is handled properly, but it is only for 2M-scale DLGs.
+			if ((subfield->length != 4) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			// Minor code may be an alphabetic database key.  Don't know.
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		default:
+			break;
+		};
+		break;
+
+	case PUBLIC_LAND_SURVEYS:
+		switch (features[i].key)  {
+ 		case 102:
+ 		case 103:
+ 		case 104:
+ 		case 105:
+ 		case 106:
+ 		case 113:
+ 		case 115:
+			if (subfield->value[0] == 'Y')  {
+				*major = features[i].major;
+				*minor = features[i].minor;
+				return 0;
+			}
+			else  {
+				return 1;
+			}
+		case 107:
+		case 114:
+			if (strncmp(subfield->value, "-9", 2) == 0)  {
+				return 1;	// -9 indicates unused
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor = strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 108:
+			if ((subfield->length != 8) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ') &&
+							(subfield->value[4] == ' ') &&
+							(subfield->value[5] == ' ') &&
+							(subfield->value[6] == ' ') &&
+							(subfield->value[7] == ' ')))  {
+				return 1;
+			}
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, &end_ptr, 10);
+			/*
+			 * If there is a fractional number, we need to add in the special code
+			 * for it.
+			 */
+			if (strncmp(end_ptr, " 1/4", 4) == 0)  {
+				end_ptr += 4;
+				*minor += 2000;
+			}
+			else if (strncmp(end_ptr, " 1/2", 4) == 0)  {
+				end_ptr += 4;
+				*minor += 4000;
+			}
+			else if (strncmp(end_ptr, " 3/4", 4) == 0)  {
+				end_ptr += 4;
+				*minor += 6000;
+			}
+			while ((*end_ptr != '\0') && (*end_ptr == ' '))  { end_ptr++; }
+			if (*end_ptr != '\0')  {
+				if (*end_ptr == 'N')  {
+					*major = 302;
+				}
+				else if (*end_ptr == 'S')  {
+					*major = 303;
+				}
+				else  {
+					fprintf(stderr, "Warning:  Township number (SDTS=%.*s) has an unknown form.  Assuming this is a northern township.\n", subfield->length, subfield->value);
+					*major = 302;
+				}
+			}
+			else  {
+				fprintf(stderr, "Warning:  Township number (SDTS=%.*s) has no N/S designator.  N assumed.\n", subfield->length, subfield->value);
+				*major = 302;
+			}
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 109:
+			if ((subfield->length != 8) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ') &&
+							(subfield->value[4] == ' ') &&
+							(subfield->value[5] == ' ') &&
+							(subfield->value[6] == ' ') &&
+							(subfield->value[7] == ' ')))  {
+				return 1;
+			}
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, &end_ptr, 10);
+			/*
+			 * If there is a fractional number, we need to add in the special digit for it at the
+			 * beginning of the minor attribute code.  Note that there are theoretically also
+			 * two other special digits:  8 for duplicate to north
+			 * or east of original township, 9 for triplicate to north or east of original
+			 * township.  As yet, I don't know if or how these latter two codes are encoded
+			 * in SDTS, so they aren't handled here.
+			 *
+			 * I have also found some odd range specifiers in the file:
+			 *
+			 *       DLG/LARGE_SCALE/H/hooven_OH/public_lands/PL01APLF.DDF
+			 *
+			 * that take the form "1AE" and "2AE".  I have no idea what the "A" is for.
+			 * So far, out of several thousands of files checked, this is the only file that I
+			 * have found these in.  Until I find out otherwise, I am assuming that these
+			 * are an error.  However, since there are 14 instances in this one SDTS file,
+			 * error may not be the correct explanation.
+			 */
+			if (strncmp(end_ptr, " 1/4", 4) == 0)  {
+				end_ptr += 4;
+				*minor += 2000;
+			}
+			else if (strncmp(end_ptr, " 1/2", 4) == 0)  {
+				end_ptr += 4;
+				*minor += 4000;
+			}
+			else if (strncmp(end_ptr, " 3/4", 4) == 0)  {
+				end_ptr += 4;
+				*minor += 6000;
+			}
+			while ((*end_ptr != '\0') && (*end_ptr == ' '))  { end_ptr++; }
+			if (*end_ptr != '\0')  {
+				if (*end_ptr == 'E')  {
+					*major = 304;
+				}
+				else if (*end_ptr == 'W')  {
+					*major = 305;
+				}
+				else  {
+					fprintf(stderr, "Warning:  Range number (SDTS=%.*s) has an unknown form.  Assuming this is an eastern range.\n", subfield->length, subfield->value);
+					*major = 304;
+				}
+			}
+			else  {
+				fprintf(stderr, "Warning:  Range number (SDTS=%.*s) has no E/W designator.  E assumed.\n", subfield->length, subfield->value);
+				*major = 304;
+			}
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 110:
+			/*
+			 * There are theoretically some possible alphabetic modifiers
+			 * for this case, but I have yet to find a file that contains any.
+			 */
+			if ((subfield->length != 4) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 111:
+			/*
+			 * There are theoretically some possible alphabetic modifiers
+			 * for this case, but I have yet to find a file that contains any.
+			 */
+			if ((subfield->length != 4) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ')))  {
+				return 1;
+			}
+			*major = features[i].major;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		case 112:
+			/*
+			 * In theory, there can be monument numbers that contain
+			 * fractions of 1/2.  They require an extra "300 0625" to
+			 * be emitted.  I have yet to find an example, so we don't
+			 * handle such fractions yet.
+			 */
+			if ((subfield->length != 8) || ((subfield->value[0] == ' ') &&
+							(subfield->value[1] == ' ') &&
+							(subfield->value[2] == ' ') &&
+							(subfield->value[3] == ' ') &&
+							(subfield->value[4] == ' ') &&
+							(subfield->value[5] == ' ') &&
+							(subfield->value[6] == ' ') &&
+							(subfield->value[7] == ' ')))  {
+				return 1;
+			}
+			*major = 308;
+			save_byte = subfield->value[subfield->length]; subfield->value[subfield->length] = '\0';
+			*minor =  strtol(subfield->value, (char **)0, 10);
+			subfield->value[subfield->length] = save_byte;
+			return 0;
+		default:
+			break;
+		};
+		break;
+
+	default:
+		fprintf(stderr, "Couldn't find attribute feature name (%s).  Attribute feature ignored.  Internal codes: %d,%d\n", subfield->label, i, features[i].key);
+		return 1;
+		break;
+	}
+
+	return 1;
+}
+
+
+
+
+/*
+ * Read in all of the attribute files that affect this SDTS transfer.
+ *
+ * The process returns the number of attribute file entries in the attrib_files structure.
+ */
+long
+process_attrib_sdts(char *passed_file_name, char *category_name, long *data_type, long *color, long gz_flag, long upper_case_flag)
+{
+	struct subfield subfield;
+	long i, j, k;
+	double f;
+	long num_attrib_files;
+	long file_name_length;
+	char file_name[MAX_FILE_NAME + 1];
+	long current_size;
+	char type[2];
+	long parse_type;
+	long record_id;
+	char save_byte;
+	char *ptr;
+	long major, minor;
+	long major2, minor2;
+
+
+	num_attrib_files = 0;
+	file_name_length = strlen(passed_file_name);
+	file_name[MAX_FILE_NAME] = '\0';
+
+
+	/*
+	 * There are separate SDTS DLG files containing data records for
+	 * Nodes, Areas, and Lines.  We are going to open all of them and
+	 * search them for the attribute files that they reference.
+	 * The relevant modules are LE??, PC??, NE??, and NO??.
+	 * We will eventually read these line/area/node files again to get their other information.
+	 *
+	 * The Line, Area, and Node files reference the attribute Record IDs to attach
+	 * attributes to lines, areas, and nodes (degenerate lines) so we need this
+	 * information before we read in the Line, Area, and Node files, so that we
+	 * can build complete entries for each of the Nodes, Lines, and Areas.
+	 *
+	 * We store the attributes from each file in Record ID order.  (The Record IDs don't have
+	 * to be sequential numbers, under the SDTS standard, but we assume that they
+	 * are because the attribute files would have had to be generated from scratch during
+	 * conversion to SDTS.)
+	 *
+	 * If attributes are used, they will be used in one of the four (LE??, PC??, NE??, NO??)
+	 * files.  Note that, while the main primary attribute file is named A??F, there may
+	 * be other primary attribute files, named ACOI (attributes that describe
+	 * coincident features), AHPR (elevation attributes, in meters, for hypsography files),
+	 * AHPT (elevation attributes, in feet, for hypsography files), ARDM (route numbers
+	 * and route types for roads and trails), or ABDM (agency attributes for boundary
+	 * data, mostly for 2M-scale files, but occasionally for 100K-scale files).
+	 * (There are several types of secondary attribute files too, but they are only used for
+	 * 2,000,000 scale DLG data, so we ignore them.)
+	 *
+	 * We could also obtain the name of the attribute files by reading the CATD module.
+	 * By doing it the way we do here, we find out if any attributes are actually used.
+	 * The code was written the way it is because there might be multiple primary
+	 * attribute files in a given transfer, which would make the
+	 * CATD information ambiguous.  For example, a Transportation transfer could
+	 * contain ARDF, AMTF, and ARRF for roads, misc features, and railroads.  Generally,
+	 * we should only need one of these to process a given LE?? module.
+	 */
+	/*
+	 * Open the LE?? module in preparation for parsing.
+	 */
+	if (begin_ddf(passed_file_name) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", passed_file_name, errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find what we want.
+	 */
+	while (get_subfield(&subfield) != 0)  {
+		if (strcmp(subfield.tag, "ATID") == 0)  {
+			if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+				if (subfield.length == 4)  {
+					for (i = 0; i < num_attrib_files; i++)  {
+						if ((attrib_files[i].module_name[0] == subfield.value[0]) &&
+						    (attrib_files[i].module_name[1] == subfield.value[1]) &&
+						    (attrib_files[i].module_name[2] == subfield.value[2]) &&
+						    (attrib_files[i].module_name[3] == subfield.value[3]))  {
+							break;
+						}
+					}
+					if (i == num_attrib_files)  {
+						if (num_attrib_files == MAX_ATTRIB_FILES)  {
+							fprintf(stderr, "Ran out of space for attribute file names.\n");
+							break;
+						}
+						attrib_files[i].module_name[0] = subfield.value[0];
+						attrib_files[i].module_name[1] = subfield.value[1];
+						attrib_files[i].module_name[2] = subfield.value[2];
+						attrib_files[i].module_name[3] = subfield.value[3];
+						num_attrib_files++;
+					}
+				}
+				else  {
+					fprintf(stderr, "Attribute module ID %.*s does not appear correct.\n", subfield.length, subfield.value);
+				}
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+
+
+
+	/* Now go on to the PC?? module (the polygon module). */
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'p';
+			file_name[file_name_length - 10] = 'c';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'p';
+			file_name[file_name_length -  7] = 'c';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'P';
+			file_name[file_name_length - 10] = 'C';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'P';
+			file_name[file_name_length -  7] = 'C';
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) >= 0)  {
+		while (get_subfield(&subfield) != 0)  {
+			if (strcmp(subfield.tag, "ATID") == 0)  {
+				if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+					if (subfield.length == 4)  {
+						for (i = 0; i < num_attrib_files; i++)  {
+							if ((attrib_files[i].module_name[0] == subfield.value[0]) &&
+							    (attrib_files[i].module_name[1] == subfield.value[1]) &&
+							    (attrib_files[i].module_name[2] == subfield.value[2]) &&
+							    (attrib_files[i].module_name[3] == subfield.value[3]))  {
+								break;
+							}
+						}
+						if (i == num_attrib_files)  {
+							if (num_attrib_files == MAX_ATTRIB_FILES)  {
+								fprintf(stderr, "Ran out of space for attribute file names.\n");
+								break;
+							}
+							attrib_files[i].module_name[0] = subfield.value[0];
+							attrib_files[i].module_name[1] = subfield.value[1];
+							attrib_files[i].module_name[2] = subfield.value[2];
+							attrib_files[i].module_name[3] = subfield.value[3];
+							num_attrib_files++;
+						}
+					}
+					else  {
+						fprintf(stderr, "Attribute module ID %.*s does not appear correct.\n", subfield.length, subfield.value);
+					}
+				}
+			}
+		}
+		/* We are done with this file, so close it. */
+		end_ddf();
+	}
+
+
+
+	/* Now go on to the NO?? module (the planar-node module). */
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'n';
+			file_name[file_name_length - 10] = 'o';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'n';
+			file_name[file_name_length -  7] = 'o';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'N';
+			file_name[file_name_length - 10] = 'O';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'N';
+			file_name[file_name_length -  7] = 'O';
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) >= 0)  {
+		while (get_subfield(&subfield) != 0)  {
+			if (strcmp(subfield.tag, "ATID") == 0)  {
+				if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+					if (subfield.length == 4)  {
+						for (i = 0; i < num_attrib_files; i++)  {
+							if ((attrib_files[i].module_name[0] == subfield.value[0]) &&
+							    (attrib_files[i].module_name[1] == subfield.value[1]) &&
+							    (attrib_files[i].module_name[2] == subfield.value[2]) &&
+							    (attrib_files[i].module_name[3] == subfield.value[3]))  {
+								break;
+							}
+						}
+						if (i == num_attrib_files)  {
+							if (num_attrib_files == MAX_ATTRIB_FILES)  {
+								fprintf(stderr, "Ran out of space for attribute file names.\n");
+								break;
+							}
+							attrib_files[i].module_name[0] = subfield.value[0];
+							attrib_files[i].module_name[1] = subfield.value[1];
+							attrib_files[i].module_name[2] = subfield.value[2];
+							attrib_files[i].module_name[3] = subfield.value[3];
+							num_attrib_files++;
+						}
+					}
+					else  {
+						fprintf(stderr, "Attribute module ID %.*s does not appear correct.\n", subfield.length, subfield.value);
+					}
+				}
+			}
+		}
+		/* We are done with this file, so close it. */
+		end_ddf();
+	}
+
+
+
+	/* Now go on to the NE?? module (the node-entity module). */
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'n';
+			file_name[file_name_length - 10] = 'e';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'n';
+			file_name[file_name_length -  7] = 'e';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			file_name[file_name_length - 11] = 'N';
+			file_name[file_name_length - 10] = 'E';
+		}
+		else  {
+			file_name[file_name_length -  8] = 'N';
+			file_name[file_name_length -  7] = 'E';
+		}
+	}
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(file_name) >= 0)  {
+		while (get_subfield(&subfield) != 0)  {
+			if (strcmp(subfield.tag, "ATID") == 0)  {
+				if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+					if (subfield.length == 4)  {
+						for (i = 0; i < num_attrib_files; i++)  {
+							if ((attrib_files[i].module_name[0] == subfield.value[0]) &&
+							    (attrib_files[i].module_name[1] == subfield.value[1]) &&
+							    (attrib_files[i].module_name[2] == subfield.value[2]) &&
+							    (attrib_files[i].module_name[3] == subfield.value[3]))  {
+								break;
+							}
+						}
+						if (i == num_attrib_files)  {
+							if (num_attrib_files == MAX_ATTRIB_FILES)  {
+								fprintf(stderr, "Ran out of space for attribute file names.\n");
+								break;
+							}
+							attrib_files[i].module_name[0] = subfield.value[0];
+							attrib_files[i].module_name[1] = subfield.value[1];
+							attrib_files[i].module_name[2] = subfield.value[2];
+							attrib_files[i].module_name[3] = subfield.value[3];
+							num_attrib_files++;
+						}
+					}
+					else  {
+						fprintf(stderr, "Attribute module ID %.*s does not appear correct.\n", subfield.length, subfield.value);
+					}
+				}
+			}
+		}
+		/* We are done with this file, so close it. */
+		end_ddf();
+	}
+
+
+
+	/*
+	 * Check to see that we didn't get any unknown file types.
+	 *
+	 * There are additional file types for 2,000,000-scale files,
+	 * but we don't handle such files so ignore those file types.
+	 */
+	for (i = 0; i < num_attrib_files; i++)  {
+		if ((strncmp(attrib_files[i].module_name, "AHPF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "AHYF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ASCF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ANVF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ABDF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "AMTF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ARDF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ARRF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "AMSF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ASMF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "APLF", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ACOI", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "AHPR", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "AHPT", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ABDM", 4) != 0) &&
+		    (strncmp(attrib_files[i].module_name, "ARDM", 4) != 0))  {
+			fprintf(stderr, "Unknown attribute file type (%s).  File type ignored.\n", attrib_files[i].module_name);
+			attrib_files[i].module_name[0] = attrib_files[num_attrib_files - 1].module_name[0];
+			attrib_files[i].module_name[1] = attrib_files[num_attrib_files - 1].module_name[1];
+			attrib_files[i].module_name[2] = attrib_files[num_attrib_files - 1].module_name[2];
+			attrib_files[i].module_name[3] = attrib_files[num_attrib_files - 1].module_name[3];
+			num_attrib_files--;
+		}
+	}
+
+
+
+	/*
+	 * The two middle characters of the Attribute Features file name
+	 * tell us the type of data in this SDTS theme.
+	 *
+	 * We can't reliably get this from the file name or category name
+	 * because Transportation files may have TR in their file name
+	 * or TRANSPORTATION as a category name.  (If we were able to pry the theme
+	 * out of the CATS module, the category name has already been updated from
+	 * TRANSPORTATION to the proper theme.  However, that update may have failed.)
+	 * We need the more detailed RD, RR, or MT designators for transportation.
+	 *
+	 * Check that we have one and only one main Primary Attribute File.
+	 * If so, then get the theme from the module name.
+	 * If there is no main Primary Attribute File, or more than one, then
+	 * do our best to deduce the data type from the category name or
+	 * the passed_file_name.
+	 *
+	 * This may seem like a lot of work for a piece of data of so-so
+	 * importance.  However, we choose the color scheme of the map
+	 * based on this data, and it would be quite confusing to get
+	 * the color wrong.
+	 */
+	j = 0;
+	for (i = 0; i < num_attrib_files; i++)  {
+		if (attrib_files[i].module_name[3] == 'F')  {
+			j++;
+			k = i;
+		}
+	}
+	if (j != 1)  {
+		if (j > 1)  {
+			fprintf(stderr, "Warning:  More than one main primary attribute file.  Handling ambiguity as best I can.\n");
+		}
+		if ((category_name[0] != '\0') && (category_name[1] != '\0'))  {
+			switch(category_name[0])  {
+			case 'B':	/* BOUNDARIES */
+				type[0] = 'B';
+				type[1] = 'D';
+				break;
+			case 'H':
+				type[0] = 'H';
+				if (category_name[2] == 'D')  {
+					/* HYDROGRAPHY */
+					type[1] = 'Y';
+					break;
+				}
+				else  {
+					/* HYPSOGRAPHY */
+					type[1] = 'P';
+					break;
+				}
+			case 'P':
+				if (category_name[1] == 'I')  {
+					/* PIPE & TRANS LINES */
+					type[0] = 'M';
+					type[1] = 'T';
+					break;
+				}
+				else  {
+					/* PUBLIC LAND SURVEYS */
+					type[0] = 'P';
+					type[1] = 'L';
+					break;
+				}
+			case 'R':
+				type[0] = 'R';
+				if (category_name[1] == 'A')  {
+					/* RAILROADS */
+					type[1] = 'R';
+					break;
+				}
+				else  {
+					/* ROADS AND TRAILS */
+					type[1] = 'D';
+					break;
+				}
+			case 'M':	/* MANMADE FEATURES */
+				type[0] = 'M';
+				type[1] = 'S';
+				break;
+			case 'S':	/* SURVEY CONTROL */
+				type[0] = 'S';
+				type[1] = 'M';
+				break;
+			case 'V':	/* VEG SURFACE COVER */
+				type[0] = 'S';
+				type[1] = 'C';
+				break;
+			case 'N':	/* NON-VEG FEATURES */
+				type[0] = 'N';
+				type[1] = 'V';
+				break;
+			default:
+				fprintf(stderr, "Unknown theme %20.20s\n", category_name);
+				exit(0);
+				break;
+			}
+		}
+		else  {
+			if ((gz_flag != 0) && (file_name_length >= 15))  {
+				type[0] = toupper(passed_file_name[file_name_length - 15]);
+				type[1] = toupper(passed_file_name[file_name_length - 14]);
+			}
+			else if ((gz_flag == 0) && (file_name_length >= 12))  {
+				type[0] = toupper(passed_file_name[file_name_length - 12]);
+				type[1] = toupper(passed_file_name[file_name_length - 11]);
+			}
+			else  {
+				type[0] = '\0';
+				type[1] = '\0';
+			}
+		}
+	}
+	else {
+		type[0] = attrib_files[k].module_name[1];
+		type[1] = attrib_files[k].module_name[2];
+	}
+	switch(type[0])  {
+	case 'B':	/* BD: BOUNDARIES */
+		*color = GRAY;
+		*data_type = BOUNDARIES;
+		break;
+	case 'H':
+		if (type[1] == 'Y')  {
+			/* HY: HYDROGRAPHY */
+			*color = B_BLUE;
+			*data_type = HYDROGRAPHY;
+			break;
+		}
+		else  {
+			/* HP: HYPSOGRAPHY */
+			*color = L_ORANGE;
+			*data_type = HYPSOGRAPHY;
+			break;
+		}
+	case 'P':	/* PL: US PUBLIC LAND SURVEY SYSTEM */
+		*color = BLACK;
+		*data_type = PUBLIC_LAND_SURVEYS;
+		break;
+	case 'R':
+		if (type[1] == 'R')  {
+			/* RR: RAILROADS */
+			*color = BLACK;
+			*data_type = RAILROADS;
+			break;
+		}
+		else  {
+			/* RD: ROADS AND TRAILS */
+			*color = B_RED;
+			*data_type = ROADS_AND_TRAILS;
+			break;
+		}
+	case 'T':	/* TR: general transportation features.  Roads and trails assumed. */
+		*color = B_RED;
+		*data_type = ROADS_AND_TRAILS;
+		break;
+	case 'M':
+		if (type[1] == 'T')  {
+			/* MT: PIPELINES, TRANSMISSION LINES, and MISC TRANSPORTATION FEATURES */
+			*color = BLACK;
+			*data_type = PIPE_TRANS_LINES;
+		}
+		else  {
+			/* MS: MANMADE FEATURES */
+			*color = BLACK;
+			*data_type = MANMADE_FEATURES;
+		}
+		break;
+	case 'S':
+		if (type[1] == 'C')  {
+			/* SC: VEGETATIVE SURFACE COVER */
+			*color = B_GREEN;
+			*data_type = VEG_SURFACE_COVER;
+		}
+		else  {
+			/* SM: SURVEY CONTROL AND MARKERS */
+			*color = BLACK;
+			*data_type = SURVEY_CONTROL;
+		}
+		break;
+	case 'N':	/* NV: NON-VEG FEATURES */
+		*color = BLACK;
+		*data_type = NON_VEG_FEATURES;
+		break;
+	default:
+		fprintf(stderr, "Unknown data type %c%c, assuming Boundaries\n", type[0], type[1]);
+		*color = BLACK;
+		*data_type = BOUNDARIES;
+		break;
+	}
+
+
+
+	/*
+	 * At this point, we should have a complete list of attribute files
+	 * that need to be read in.  Thus, we go ahead and do the reading.
+	 */
+	for (i = 0; i < num_attrib_files; i++)  {
+		attrib_files[i].num_attrib = -1;
+		attrib_files[i].attrib = (struct attribute_list *)0;
+		current_size = 0;
+
+
+		/*
+		 * Convert the module name into a single number for later use.
+		 */
+		switch (attrib_files[i].module_name[3])  {
+		case 'F':	// main Primary Attribute Module.  (Files with names of the form A??F.)
+			parse_type = 0;
+			break;
+		case 'I':	// Coincidence Attribute Primary Module.  (Files with names of the form ACOI.)
+			parse_type = 1;
+			break;
+		case 'R':	// Elevation Attribute Primary Module (meters). (Files with names of the form AHPR.)
+			parse_type = 2;
+			break;
+		case 'T':	// Elevation Attribute Primary Module (feet). (Files with names of the form AHPT.)
+			parse_type = 3;
+			break;
+		case 'M':
+			if (attrib_files[i].module_name[1] == 'R')  {
+				// Route Attribute Primary Module. (Files with names of the form ARDM.)
+				parse_type = 4;
+			}
+			else  {
+				// Agency Attribute Primary Module. (Files with names of the form ABDM.)
+				parse_type = 5;
+			}
+			break;
+		default:
+			fprintf(stderr, "Unknown attribute file type (%s).  Should have been detected earlier.\n", attrib_files[i].module_name);
+			exit(0);
+			break;
+		}
+
+
+		/*
+		 * Generate the file name to be opened.
+		 */
+		strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+		if (upper_case_flag == 0)  {
+			if (gz_flag != 0)  {
+				file_name[file_name_length - 11] = tolower(attrib_files[i].module_name[0]);
+				file_name[file_name_length - 10] = tolower(attrib_files[i].module_name[1]);
+				file_name[file_name_length -  9] = tolower(attrib_files[i].module_name[2]);
+				file_name[file_name_length -  8] = tolower(attrib_files[i].module_name[3]);
+			}
+			else  {
+				file_name[file_name_length -  8] = tolower(attrib_files[i].module_name[0]);
+				file_name[file_name_length -  7] = tolower(attrib_files[i].module_name[1]);
+				file_name[file_name_length -  6] = tolower(attrib_files[i].module_name[2]);
+				file_name[file_name_length -  5] = tolower(attrib_files[i].module_name[3]);
+			}
+		}
+		else  {
+			if (gz_flag != 0)  {
+				file_name[file_name_length - 11] = attrib_files[i].module_name[0];
+				file_name[file_name_length - 10] = attrib_files[i].module_name[1];
+				file_name[file_name_length -  9] = attrib_files[i].module_name[2];
+				file_name[file_name_length -  8] = attrib_files[i].module_name[3];
+			}
+			else  {
+				file_name[file_name_length -  8] = attrib_files[i].module_name[0];
+				file_name[file_name_length -  7] = attrib_files[i].module_name[1];
+				file_name[file_name_length -  6] = attrib_files[i].module_name[2];
+				file_name[file_name_length -  5] = attrib_files[i].module_name[3];
+			}
+		}
+		/*
+		 * Open and process the file.
+		 */
+		if (begin_ddf(file_name) >= 0)  {
+			while (get_subfield(&subfield) != 0)  {
+				if (strcmp(subfield.tag, "ATPR") == 0)  {
+					if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "MODN") == 0))  {
+						if (subfield.length != 4)  {
+							fprintf(stderr, "Attribute module name (%.*s) is not 4 characters long.\n", subfield.length, subfield.value);
+							continue;
+						}
+						if (strncmp(subfield.value, attrib_files[i].module_name, 4) != 0)  {
+							fprintf(stderr, "Module name in record (%.*s) doesn't match global module name.  Entry ignored.\n",
+									subfield.length, subfield.value);
+							continue;
+						}
+					}
+					else if ((strstr(subfield.format, "I") != (char *)0) && (strcmp(subfield.label, "RCID") == 0))  {
+						save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+						record_id = strtol(subfield.value, (char **)0, 10);
+						subfield.value[subfield.length] = save_byte;
+
+						/*
+						 * Hopefully, record IDs won't backtrack very often,
+						 * but I have seen it happen.  Print a warning if it
+						 * does happen.
+						 */
+						if ((record_id - 1) < attrib_files[i].num_attrib)  {
+							fprintf(stderr, "Warning:  Record IDs don't appear to be sequential in file %s.  Some attributes may be lost or corrupted.\n", file_name);
+						}
+						else  {
+							attrib_files[i].num_attrib++;
+
+							/*
+							 * If the record_id numbers exceed the number of attribs,
+							 * then make a hole in the attrib table to accommodate them.
+							 */
+							if ((record_id - 1) > attrib_files[i].num_attrib)  {
+								attrib_files[i].num_attrib = record_id - 1;
+							}
+
+							/*
+							 * If we need more space, get it.
+							 */
+							if (attrib_files[i].num_attrib > (current_size - 1))  {
+								current_size = attrib_files[i].num_attrib + 100;
+								attrib_files[i].attrib = (struct attribute_list *)realloc(attrib_files[i].attrib, sizeof(struct attribute_list) * current_size);
+								if (attrib_files[i].attrib == (struct attribute_list *)0)  {
+									fprintf(stderr, "realloc of attrib_files[].attrib failed.\n");
+									exit(0);
+								}
+							}
+
+							/*
+							 * Null out all of the attributes for the new entry.
+							 */
+							for (j = 0; j < MAX_EXTRA; j++)  {
+								attrib_files[i].attrib[attrib_files[i].num_attrib].minor[j] = 0;
+								attrib_files[i].attrib[attrib_files[i].num_attrib].major[j] = 0;
+							}
+						}
+					}
+				}
+				else if (strcmp(subfield.tag, "ATTP") == 0)  {
+					if (attrib_files[i].num_attrib < 0)  {
+						fprintf(stderr, "Attribute labels out of sequence in %s.\n", file_name);
+						exit(0);
+					}
+					switch (parse_type)  {
+					case 0:	// main Primary Attribute Module.  (Modules with names of the form A??F.)
+						if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "ENTITY_LABEL", 12) == 0))  {
+							if (subfield.length == 7)  {
+								save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+								attrib_files[i].attrib[record_id - 1].minor[0] = strtol(&subfield.value[3], (char **)0, 10);
+								subfield.value[subfield.length] = save_byte;
+		
+								save_byte = subfield.value[3]; subfield.value[3] = '\0';
+								attrib_files[i].attrib[record_id - 1].major[0] =  strtol(subfield.value, (char **)0, 10);
+								subfield.value[3] = save_byte;
+							}
+							else  {
+								fprintf(stderr, "unexpected attribute length (%d) in file %s\n", subfield.length, file_name);
+							}
+						}
+						else  {
+							/*
+							 * Rather than have multiple attribute codes, the SDTS TVP
+							 * modules encode extra attributes as additional features
+							 * of the primary attribute, as part of the single ISO 8211
+							 * record associated with the attribute.  (Occasionally there may still be
+							 * multiple primary attributes, and hence multiple ISO 8211 records.)
+							 * Because the code to parse the extra information is quite bulky,
+							 * it is relegated to a separate function.
+							 *
+							 * If we haven't already found an attribute, then there is something
+							 * wrong.  Avoid a core dump by checking num_attrib.
+							 */
+							major2 = 0;
+							if (get_extra_attrib(*data_type, &major, &minor, &major2, &minor2, &subfield) == 0)  {
+								for (j = 0; j < MAX_EXTRA; j++)  {
+									if (attrib_files[i].attrib[record_id - 1].major[j] == 0)  {
+										break;
+									}
+								}
+								if (j == MAX_EXTRA)  {
+									fprintf(stderr, "Ran out of space for attribute features.  One attribute is missing.\n");
+									continue;
+								}
+								attrib_files[i].attrib[record_id - 1].major[j] = major;
+								attrib_files[i].attrib[record_id - 1].minor[j] = minor;
+								if (major2 != 0)  {
+									if ((j + 1) == MAX_EXTRA)  {
+										fprintf(stderr, "Ran out of space for attribute features.  One attribute is missing.\n");
+										continue;
+									}
+									attrib_files[i].attrib[record_id - 1].major[j + 1] = major2;
+									attrib_files[i].attrib[record_id - 1].minor[j + 1] = minor2;
+								}
+							}
+						}
+						break;
+					case 1:	// Coincidence Attribute Primary Module.  (Modules with names of the form ACOI.)
+						if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "COINCIDENT", 10) == 0))  {
+							if (subfield.length == 2)  {
+								save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+								attrib_files[i].attrib[record_id - 1].minor[0] = strtol(subfield.value, (char **)0, 10);
+								subfield.value[subfield.length] = save_byte;
+		
+								attrib_files[i].attrib[record_id - 1].major[0] =  *data_type + 9;
+							}
+							else  {
+								fprintf(stderr, "unexpected attribute length (%d) in file %s\n", subfield.length, file_name);
+							}
+						}
+						else  {
+							fprintf(stderr, "Unrecognized attribute label (%s) in file %s.\n", subfield.label, file_name);
+						}
+						break;
+					case 2:	// Elevation Attribute Primary Module (meters). (Modules with names of the form AHPR.)
+						if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "ELEVATION", 9) == 0))  {
+							if (subfield.length == 8)  {
+								save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+								f = strtod(subfield.value, (char **)0);
+								subfield.value[subfield.length] = save_byte;
+								if (f < 0.0)  {
+									attrib_files[i].attrib[record_id - 1].major[0] = 25;
+									attrib_files[i].attrib[record_id - 1].minor[0] = -f;
+								}
+								else  {
+									attrib_files[i].attrib[record_id - 1].major[0] = 24;
+									attrib_files[i].attrib[record_id - 1].minor[0] = f;
+								}
+							}
+							else  {
+								fprintf(stderr, "unexpected attribute length (%d) in file %s\n", subfield.length, file_name);
+							}
+						}
+						else  {
+							fprintf(stderr, "Unrecognized attribute label (%s) in file %s.\n", subfield.label, file_name);
+						}
+						break;
+					case 3:	// Elevation Attribute Primary Module (feet). (Modules with names of the form AHPT.)
+						if ((strstr(subfield.format, "R") != (char *)0) && (strncmp(subfield.label, "ELEVATION", 9) == 0))  {
+							if (subfield.length == 8)  {
+								save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+								f = strtod(subfield.value, (char **)0);
+								subfield.value[subfield.length] = save_byte;
+								if (f < 0.0)  {
+									attrib_files[i].attrib[record_id - 1].major[0] = 23;
+									attrib_files[i].attrib[record_id - 1].minor[0] = -f;
+								}
+								else  {
+									if (f > 9999.0)  {
+										attrib_files[i].attrib[record_id - 1].major[0] = 21;
+										attrib_files[i].attrib[record_id - 1].minor[0] = f - 10000.0;
+									}
+									else  {
+										attrib_files[i].attrib[record_id - 1].major[0] = 22;
+										attrib_files[i].attrib[record_id - 1].minor[0] = f;
+									}
+								}
+							}
+							else  {
+								fprintf(stderr, "unexpected attribute length (%d) in file %s\n", subfield.length, file_name);
+							}
+						}
+						else  {
+							fprintf(stderr, "Unrecognized attribute label (%s) in file %s.\n", subfield.label, file_name);
+						}
+						break;
+					case 4:	// Route Attribute Primary Module. (Modules with names of the form ARDM.)
+						if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "ROUTE_NUMBER", 12) == 0))  {
+							/*
+							 * In theory, these can be quite complicated.
+							 * In general, they presumably can take the form:
+							 *
+							 * US A27Z
+							 *
+							 * Where US is the jurisdiction, and the route
+							 * number may be all alphabetic, all numeric, or
+							 * mixed.  We assume that these don't get any more
+							 * complicated than this:  jurisdiction, followed
+							 * by a numeric number, with something alphabetic in
+							 * front or behind (but not both).
+							 *
+							 * The spec also seems to imply that, if there is a route type,
+							 * it should follow the jurisdiction and route number,
+							 * but precede any trailing alphabetic on the route number.
+							 * This would require some amazing contortions to do,
+							 * so we will just tack on the route type after everything
+							 * else.  (It is handled in another code block, below.)
+							 */
+							if (subfield.length == 7)  {
+								for (j = 0; j < MAX_EXTRA; j++)  {
+									if (attrib_files[i].attrib[record_id - 1].major[j] == 0)  {
+										break;
+									}
+								}
+								if (j == MAX_EXTRA)  {
+									fprintf(stderr, "Ran out of space for attribute features.  One attribute is missing.\n");
+									continue;
+								}
+								save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+								if ((ptr = strstr(subfield.value, "I")) != (char *)0)  {
+									attrib_files[i].attrib[record_id - 1].major[j] = 172;
+									k = ptr - subfield.value + 1;
+								}
+								else if ((ptr = strstr(subfield.value, "US")) != (char *)0)  {
+									attrib_files[i].attrib[record_id - 1].major[j] = 173;
+									k = ptr - subfield.value + 2;
+								}
+								else if ((ptr = strstr(subfield.value, "SR")) != (char *)0)  {
+									attrib_files[i].attrib[record_id - 1].major[j] = 174;
+									k = ptr - subfield.value + 2;
+								}
+								else if ((ptr = strstr(subfield.value, "RR")) != (char *)0)  {
+									attrib_files[i].attrib[record_id - 1].major[j] = 175;
+									k = ptr - subfield.value + 2;
+								}
+								else if ((ptr = strstr(subfield.value, "CR")) != (char *)0)  {
+									attrib_files[i].attrib[record_id - 1].major[j] = 176;
+									k = ptr - subfield.value + 2;
+								}
+								else  {
+									k = 0;
+								}
+								ptr = subfield.value + k;	/* point just past the jurisdiction code, or at the start if none was found */
+								while ((*ptr != '\0') && (*ptr == ' '))  {ptr++;}
+								if (*ptr != '\0')  {
+									/* We have jurisdiction, followed by numeric route. */
+									if ((*ptr >= '0') && (*ptr <= '9'))  {
+										attrib_files[i].attrib[record_id - 1].minor[j] = strtol(ptr, &ptr, 10);
+										while ((*ptr != '\0') && (*ptr == ' '))  {ptr++;}
+										if (*ptr != '\0')  {
+											if ((j + 1) == MAX_EXTRA)  {
+												fprintf(stderr, "Ran out of space for attribute features.  One attribute is missing.\n");
+												continue;
+											}
+											j++;
+											attrib_files[i].attrib[record_id - 1].major[j] = 177;
+											attrib_files[i].attrib[record_id - 1].minor[j] = *ptr - 'A' + 1;
+											ptr++;
+											if ((*ptr != '\0') && (*ptr != ' '))  {
+												attrib_files[i].attrib[record_id - 1].minor[j] *= 100;
+												attrib_files[i].attrib[record_id - 1].minor[j] += *ptr - 'A' + 1;
+											}
+										}
+									}
+									else  {
+										/*
+										 * We have jurisdiction, followed by alphabetic route.
+										 * The spec seems to imply that the alpha code should
+										 * precede the jurisdictional code, so do it.
+										 */
+										if ((j + 1) == MAX_EXTRA)  {
+											fprintf(stderr, "Ran out of space for attribute features.  One attribute is missing.\n");
+											continue;
+										}
+										attrib_files[i].attrib[record_id - 1].major[j + 1] = attrib_files[i].attrib[record_id - 1].major[j];
+										attrib_files[i].attrib[record_id - 1].minor[j] = *ptr - 'A' + 1;
+										ptr++;
+										if ((*ptr != '\0') && (*ptr != ' '))  {
+											attrib_files[i].attrib[record_id - 1].minor[j] *= 100;
+											attrib_files[i].attrib[record_id - 1].minor[j] += *ptr - 'A' + 1;
+										}
+										attrib_files[i].attrib[record_id - 1].minor[j + 1] = strtol(ptr, (char **)0, 10);
+									}
+								}
+								else  {
+									attrib_files[i].attrib[record_id - 1].minor[j] = 0;
+								}
+							}
+							else  {
+								fprintf(stderr, "unexpected attribute length (%d) in file %s\n", subfield.length, file_name);
+							}
+						}
+						else if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "ROUTE_TYPE", 10) == 0))  {
+							if (subfield.length == 9)  {
+								for (j = 0; j < MAX_EXTRA; j++)  {
+									if (attrib_files[i].attrib[record_id - 1].major[j] == 0)  {
+										break;
+									}
+								}
+								if (j == MAX_EXTRA)  {
+									fprintf(stderr, "Ran out of space for attribute features.  One attribute is missing.\n");
+									continue;
+								}
+								if (strncmp(subfield.value, "Bypass", 6) == 0)  {
+									attrib_files[i].attrib[record_id - 1].minor[j] = 615;
+								}
+								else if (strncmp(subfield.value, "Alternate", 9) == 0)  {
+									attrib_files[i].attrib[record_id - 1].minor[j] = 616;
+								}
+								else if (strncmp(subfield.value, "Business", 8) == 0)  {
+									attrib_files[i].attrib[record_id - 1].minor[j] = 617;
+								}
+								else if (strncmp(subfield.value, "Spur", 4) == 0)  {
+									attrib_files[i].attrib[record_id - 1].minor[j] = 619;
+								}
+								else if (strncmp(subfield.value, "Loop", 4) == 0)  {
+									attrib_files[i].attrib[record_id - 1].minor[j] = 620;
+								}
+								else if (strncmp(subfield.value, "Connector", 9) == 0)  {
+									attrib_files[i].attrib[record_id - 1].minor[j] = 621;
+								}
+								else if (strncmp(subfield.value, "Truck", 5) == 0)  {
+									attrib_files[i].attrib[record_id - 1].minor[j] = 622;
+								}
+								else  {
+									continue;
+								}
+								attrib_files[i].attrib[record_id - 1].major[j] = 170;
+							}
+							else  {
+								fprintf(stderr, "unexpected attribute length (%d) in file %s\n", subfield.length, file_name);
+							}
+						}
+						else  {
+							fprintf(stderr, "Unrecognized attribute label (%s) in file %s.\n", subfield.label, file_name);
+						}
+						break;
+					case 5:	// Agency Attribute Primary Module. (Modules with names of the form ABDM.)
+						if ((strstr(subfield.format, "A") != (char *)0) && (strncmp(subfield.label, "AGENCY", 6) == 0))  {
+							if (subfield.length == 3)  {
+								attrib_files[i].attrib[record_id - 1].major[0] = 97;
+								save_byte = subfield.value[subfield.length]; subfield.value[subfield.length] = '\0';
+								attrib_files[i].attrib[record_id - 1].minor[0] = strtol(subfield.value, (char **)0, 10);
+								subfield.value[subfield.length] = save_byte;
+							}
+							else  {
+								fprintf(stderr, "unexpected attribute length (%d) in file %s\n", subfield.length, file_name);
+							}
+						}
+						else  {
+							fprintf(stderr, "Unrecognized attribute label (%s) in file %s.\n", subfield.label, file_name);
+						}
+						break;
+					}
+				}
+			}
+			/* We are done with this file, so close it. */
+			end_ddf();
+		}
+		attrib_files[i].num_attrib++;
+	}
+
+	return num_attrib_files;
+}
+
+
+
+
+
+
+
+/*
+ * Because of the way attributes are stored in the main Primary Attribute File,
+ * we can end up with duplicate attributes in the list.  This routine removes
+ * the duplicates.  It removes the earliest occurrence(s) of the dups since
+ * that appears to be most likely to make the list ordering match the ordering
+ * of the original (pre-SDTS) files.  As far as I know, there is no preferred
+ * ordering for attribute lists.
+ *
+ * As an example, the following attribute list:
+ *
+ * "    50   412    50   202    50   610"
+ *
+ * contains the "50   610" attribute (which stands for "INTERMITTENT").
+ * This attribute is coded in SDTS as a flag in the attribute record.
+ * The attribute list will be encoded as two SDTS attribute records,
+ * one for "50   412" (Stream) and one for "50   202" (Closure Line).
+ * Each of these records will have the "INTERMITTENT" flag set.
+ *
+ * When we decode these records, we will end up with two copies of "50   610".
+ * One of these should be removed.
+ */
+void
+uniq_attrib(struct attribute **initial_attrib, short *attrib)
+{
+	short i;
+	struct attribute **current_base;
+	struct attribute **current_attrib;
+	struct attribute **search_attrib;
+	struct attribute **next_attrib;
+	struct attribute **prev_attrib;
+
+	current_base = initial_attrib;
+	for (i = 0; i < *attrib; i++)  {
+		search_attrib = current_base;
+		while (*(search_attrib = &((*search_attrib)->attribute)) != (struct attribute *)0)  {
+			if (((*search_attrib)->major != 177) &&		// 177 is a special case for spelling out alphabetic items
+			    ((*search_attrib)->major == (*current_base)->major) &&
+			    ((*search_attrib)->minor == (*current_base)->minor))  {
+				/*
+				 * We have found a duplicate.  Remove the earlier entry.
+				 *
+				 * It is easier to alter the list by unlinking the
+				 * last entry than by removing an entry from the middle
+				 * of the list.  Thus, we shift the data backwards
+				 * to fill in the unwanted entry, rather than removing the
+				 * actual list element of the unwanted entry.
+				 * Then we simply unlink and free the final entry.
+				 */
+				prev_attrib = (struct attribute **)0;	// Induce a core dump if there is a bug in the code.
+				current_attrib = current_base;
+				next_attrib = current_base;
+				while (*(next_attrib = &((*next_attrib)->attribute)) != (struct attribute *)0)  {
+					(*current_attrib)->major = (*next_attrib)->major;
+					(*current_attrib)->minor = (*next_attrib)->minor;
+					prev_attrib = current_attrib;
+					current_attrib = next_attrib;
+				}
+				(*prev_attrib)->attribute = (struct attribute *)0;
+				free(*current_attrib);
+				(*attrib)--;
+				search_attrib = current_base;
+			}
+		}
+		current_base = &((*current_base)->attribute);
+	}
+}
+
+
+
+
+/*
+ * Read the CATS module and try to find the theme.
+ */
+void get_theme(char *passed_file_name, char *category_name, long upper_case_flag, long gz_flag)
+{
+	struct subfield subfield;
+	long file_name_length;
+	char file_name[MAX_FILE_NAME + 1];
+	char lookin_for[4];
+	long got_it;
+	long i;
+
+
+	/*
+	 * Generate the file name for the CATS module.
+	 */
+	strncpy(file_name, passed_file_name, MAX_FILE_NAME);
+	file_name[MAX_FILE_NAME] = '\0';
+	file_name_length = strlen(file_name);
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			lookin_for[0] = toupper(file_name[file_name_length - 11]);
+			lookin_for[1] = toupper(file_name[file_name_length - 10]);
+			lookin_for[2] = toupper(file_name[file_name_length -  9]);
+			lookin_for[3] = toupper(file_name[file_name_length -  8]);
+
+			file_name[file_name_length - 11] = 'c';
+			file_name[file_name_length - 10] = 'a';
+			file_name[file_name_length -  9] = 't';
+			file_name[file_name_length -  8] = 's';
+		}
+		else  {
+			lookin_for[0] = toupper(file_name[file_name_length -  8]);
+			lookin_for[1] = toupper(file_name[file_name_length -  7]);
+			lookin_for[2] = toupper(file_name[file_name_length -  6]);
+			lookin_for[3] = toupper(file_name[file_name_length -  5]);
+
+			file_name[file_name_length -  8] = 'c';
+			file_name[file_name_length -  7] = 'a';
+			file_name[file_name_length -  6] = 't';
+			file_name[file_name_length -  5] = 's';
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			lookin_for[0] = file_name[file_name_length - 11];
+			lookin_for[1] = file_name[file_name_length - 10];
+			lookin_for[2] = file_name[file_name_length -  9];
+			lookin_for[3] = file_name[file_name_length -  8];
+
+			file_name[file_name_length - 11] = 'C';
+			file_name[file_name_length - 10] = 'A';
+			file_name[file_name_length -  9] = 'T';
+			file_name[file_name_length -  8] = 'S';
+		}
+		else  {
+			lookin_for[0] = file_name[file_name_length -  8];
+			lookin_for[1] = file_name[file_name_length -  7];
+			lookin_for[2] = file_name[file_name_length -  6];
+			lookin_for[3] = file_name[file_name_length -  5];
+
+			file_name[file_name_length -  8] = 'C';
+			file_name[file_name_length -  7] = 'A';
+			file_name[file_name_length -  6] = 'T';
+			file_name[file_name_length -  5] = 'S';
+		}
+	}
+	/*
+	 * Open and process the file.
+	 */
+	got_it = 0;
+	if (begin_ddf(file_name) >= 0)  {
+		while (get_subfield(&subfield) != 0)  {
+			if (strcmp(subfield.tag, "CATS") == 0)  {
+				if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "NAME") == 0))  {
+					if (subfield.length != 4)  {
+						fprintf(stderr, "Attribute module name (%.*s) is not 4 characters long.\n", subfield.length, subfield.value);
+						continue;
+					}
+					if (strncmp(subfield.value, lookin_for, 4) == 0)  {
+						got_it = 1;
+					}
+					else  {
+						got_it = 0;
+					}
+				}
+				else if ((strstr(subfield.format, "A") != (char *)0) && (strcmp(subfield.label, "THEM") == 0))  {
+					if (got_it != 0)  {
+						if ((subfield.length != 20) || (subfield.value[0] == ' '))  {
+							continue;
+						}
+						strncpy(category_name, subfield.value, subfield.length);
+						for (i = 19; i >= 0; i--)  {
+							if (category_name[i] != ' ')  {
+								category_name[i + 1] = '\0';
+								break;
+							}
+						}
+
+						return;
+					}
+				}
+			}
+		}
+	}
+
+	return;
+}
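[Editorial note on the source above: the parsing code repeatedly uses the same idiom for
converting a fixed-width SDTS subfield to a number -- the byte just past the subfield is
saved, the value is temporarily NUL-terminated, strtol() is applied, and the saved byte is
restored so the shared record buffer is left intact.  Below is a minimal, self-contained
sketch of that idiom; the names demo_subfield and subfield_to_long are illustrative only
and are not part of drawmap.]

#include <stdio.h>
#include <stdlib.h>

struct demo_subfield {
	char	*value;		/* points into a larger, shared record buffer */
	int	length;		/* fixed width of this subfield, in bytes */
};

/* Convert a fixed-width, non-NUL-terminated subfield to a long. */
static long
subfield_to_long(struct demo_subfield *sf)
{
	char save_byte;
	long result;

	save_byte = sf->value[sf->length];	/* byte that follows the subfield */
	sf->value[sf->length] = '\0';		/* temporarily terminate the value */
	result = strtol(sf->value, (char **)0, 10);
	sf->value[sf->length] = save_byte;	/* restore the shared buffer */

	return result;
}

int
main(void)
{
	char record[] = "0050   610rest-of-record";
	struct demo_subfield sf;

	sf.value = &record[4];	/* the "   610" minor-code subfield */
	sf.length = 6;
	printf("minor = %ld\n", subfield_to_long(&sf));	/* prints "minor = 610" */

	return 0;
}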

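[Editorial note, continued: process_attrib_sdts() and get_theme() above derive the names
of sibling SDTS modules (PC??, NO??, NE??, CATS, and the attribute modules) from the name
of the LE?? module that was passed in, by overwriting the four-character module code that
sits a fixed distance from the end of the file name, with the distance depending on whether
the name ends in ".DDF" or ".DDF.gz".  The sketch below shows only that offset arithmetic;
sibling_module() is a hypothetical helper, the example file name is made up, and the
upper/lower-case handling done by drawmap is omitted.]

#include <stdio.h>
#include <string.h>

#define MAX_FILE_NAME	1000

/*
 * Build the file name of a sibling module by replacing the four-character
 * module code that sits just before ".DDF" (or ".DDF.gz") in le_name.
 */
static void
sibling_module(const char *le_name, const char *module, int gz_flag, char *out)
{
	size_t	len = strlen(le_name);
	size_t	offset = gz_flag ? 11 : 8;	/* 4-char module code + ".DDF[.gz]" */

	strncpy(out, le_name, MAX_FILE_NAME);
	out[MAX_FILE_NAME] = '\0';
	if (len >= offset)  {
		memcpy(&out[len - offset], module, 4);
	}
}

int
main(void)
{
	char out[MAX_FILE_NAME + 1];

	sibling_module("1477LE01.DDF", "CATS", 0, out);
	printf("%s\n", out);			/* prints "1477CATS.DDF" */

	sibling_module("1477LE01.DDF.gz", "NO01", 1, out);
	printf("%s\n", out);			/* prints "1477NO01.DDF.gz" */

	return 0;
}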
Added: packages/drawmap/branches/upstream/current/drawmap.1n
===================================================================
--- packages/drawmap/branches/upstream/current/drawmap.1n	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/drawmap.1n	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,1121 @@
+.TH DRAWMAP 1 "Aug  1, 2001" \" -*- nroff -*-
+.SH NAME
+drawmap \- draw customized maps, using raw USGS data files
+.SH SYNOPSIS
+.B drawmap
+.RB [\-l\ latitude1,longitude1,latitude2,longitude2]\ [\-L]
+.br
+.RB [\-o\ output_file.sun]\ [\-d\ dem_file1\ [\-d\ dem_file2\ [...]]]
+.br
+.RB [\-c\ contour_interval_in_meters]
+.br
+.RB [\-C\ contour_interval_in_meters]
+.br
+.RB [\-g\ gnis_file]\ [\-a\ attribute_file]\ [\-x\ x_size]\ [\-y\ y_size]
+.br
+.RB [\-w]\ [\-n\ color_table_number]\ [\-r\ relief_factor]\ [\-z]
+.br
+.RB [\-i]\ [\-h]\ [\-t]\ [dlg_file1\ [dlg_file2\ [...]]]
+.SH VERSION
+This is the manual page for version 2.5 of drawmap.
+.SH DESCRIPTION
+.PP
+The U.S. Geological Survey, and other sources, maintain sites on the Internet offering
+many gigabytes of raw geographic data, mostly for the USA.
+.I Drawmap
+draws maps, using a subset of the available data.
+The relevant subset includes:
+.TP
+.I "250K Digital Elevation Model (DEM) files"
+Each file covers a block, one-degree square, with a 1201 by 1201 grid of elevations (in meters).
+The extra sample in each direction is due to overlap of the DEM files at their edges.
+(Files for Alaska use smaller grids, with only 401 or 601 samples in the east-west direction.)
+For Hawaii and the "lower 48," the one-degree square is covered by elevation samples
+spaced 3 arc-seconds apart; and you will often hear these files called 3-arc-second
+files.
+In terms of distance along the ground, the sample spacing varies with latitude.
+It is generally less than 100 meters.  (The "250K" means that the data were digitized
+from a map at the scale of 1:250,000.)  Files of this type are currently available
+for free download from the USGS.
+.TP
+.I "24K Digital Elevation Model (DEM) files (in the 'classic' format or the SDTS format)"
+These files usually result from digitizing a "quad" map sheet.  (Each 1-degree
+block of latitude and longitude, covered by a 250K DEM file, is further subdivided into
+an 8-by-8 grid to produce smaller blocks called quads.)
+The number of samples in each direction varies, from quad to quad, but there is
+roughly one sample per second of arc.  These files differ from the 250K DEM files
+in that the samples are spaced a fixed number of meters apart rather than a fixed
+number of arc-seconds.  Because each quad represents an area that is 7.5 minutes
+of latitude by 7.5 minutes of longitude, you will also hear these files called
+7.5-minute DEMs.  The USGS provides 24K DEM data for free download, in
+the Spatial Data Transfer Standard (SDTS) format.
+Some files in the older format are available from other sources,
+and may be available for purchase from the USGS.
+.TP
+.I "100K Digital Line Graph (DLG) files (in the 'optional' format or the SDTS format)"
+These files come in collections, each of which covers a quarter of the one-degree
+square covered by a 250K DEM file.  The files contain information that allows segmented
+linear and polygonal features to be drawn on maps, including boundary lines,
+hydrographic features (streams, lakes, and so on),
+transportation features (roads, rail lines, pipelines, and so on),
+public land survey data, and hypsographic lines (the familiar contour lines of a topographic map).
+The different general classes of data come in separate files.
+Files of this type are currently available for free download from the USGS,
+in either the 'optional' format or the SDTS format.
+.TP
+.I "24K Digital Line Graph (DLG) files (in the 'optional' format or the SDTS format)"
+Like the 24K DEM files, each of these files covers a single quad.
+Except for their inherently greater detail, these files are essentially the same
+as the 100K DLG files.  As with the 24K DEM files, these files are freely available
+in SDTS format, but are harder to come by in the 'optional' format.
+.TP
+.I "GTOPO30 files"
+GTOPO30 files are similar to 250K DEM files, but with samples spaced 30 arc-seconds apart,
+and with a quite different format.  (For the purposes of this manual page, you
+can consider GTOPO30 files as just another variety of DEM files.)
+While these files have relatively low resolution, they have the virtue of providing
+full coverage for the entire planet.  Furthermore, they are
+currently available for free download.
+.TP
+.I "Geographic Names Information System (GNIS) files"
+These files are basically lists of place names, with the addition of latitude/longitude and other information.
+They are available for free download.
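+.PP
+As a rough sense of scale for the sample spacings mentioned above:  one degree
+of latitude is about 111 kilometers, so a 3-arc-second spacing works out to
+roughly 92 meters north-south, and a 30-arc-second spacing (GTOPO30) to
+roughly 925 meters.
+East-west spacings are smaller by a factor of the cosine of the latitude
+(about 65 meters, for 3-arc-second data, at 45 degrees north).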
+.PP
+If a DEM or DLG file is one of the above types, and is from the USGS, there is a good chance that
+.I drawmap
+can use it.  However, given the range of available files, this is not a certainty.
+For example, the USGS used to distribute 100K DLG files in 'standard' format, with
+the characters 'std' as part of the file name.
+Neither the USGS nor
+.I drawmap
+supports these files anymore, but you may still be able to obtain them.
+There is also a certain amount of variability in the format of USGS SDTS files, and
+.I drawmap
+may not be able to handle some of the variants, especially if I have never
+obtained a relevant sample file to test against.
+Files from non-USGS sources may also be usable.  However, people don't always
+strictly follow the relevant standards when they create DEM or DLG files,
+and the standards can sometimes be difficult to interpret, and
+.I drawmap
+isn't infinitely adaptable, so not all files can be processed.
+.PP
+Using the data in the various files,
+.I drawmap
+can produce various kinds of customized maps, including shaded relief maps (with or without roads, streams,
+place names, and so on) and topographic maps (again, with or without additional features).
+.PP
+The output is an image, in SUN rasterfile format, which can be viewed with your favorite
+image viewer, or converted to other forms for display or hard copy output.
+(My preferred viewing/converting packages are the "ImageMagick" package and the
+"xv" package.  However, I prefer them only in the sense that I have them on my system.
+I would imagine that other packages would be quite satisfactory as well.)
+.SH Map Projection Used by Drawmap
+.PP
+The only type of map projection currently supported is not technically a projection at all,
+but simply a grid of latitudes and longitudes.
+(The word "projection" is a mathematical term describing the "stretching" of the earth's
+roughly-spherical surface onto a flat map.)
+Over limited areas, this latitude/longitude grid approximates
+a family of projections called cylindrical projections.
+By default, the grid is square, in the sense that 1000 pixels in the latitude direction represent
+the same number of degrees as 1000 pixels in the longitude direction.
+You can make the grid non-square by playing with the "-x" and "-y" options.
+.P
+There are several reasons for using this approach.
+First, the 250K DEM data contain sample points that are evenly spaced in degrees
+of latitude and longitude, making a simple grid the natural choice.
+(Drawmap was originally written solely with 250K DEM data in mind.)
+Second, this is an intuitive projection for small-area maps.
+Over small areas, it approximates the Transverse Mercator projection,
+a cylindrical projection that is often used for topographic maps.
+Third, the USGS DLG data and 24K DEM data are
+specified in Universal Transverse Mercator (UTM) coordinates, which are based on
+a rectangular grid of x-y distances.  Over small areas, degrees of arc make a
+reasonable substitute for x-y distances, as long as the horizontal and vertical
+directions are appropriately scaled.
+And, finally, the latitude/longitude grid is also acceptable for maps that cover large areas.
+Since users of
+.I drawmap
+can produce maps covering any amount of
+territory, the latitude/longitude grid is a lowest-common-denominator that can
+handle any request.
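+.PP
+As a rough example of the scaling involved:  at 60 degrees north latitude,
+a degree of longitude covers only half as much ground distance as a degree of
+latitude (the ratio is the cosine of the latitude).
+A one-degree-square map drawn with "-y 1200" and "-x 600" therefore has
+roughly equal ground scales in the two directions, whereas the default square
+grid would stretch the map east-west by about a factor of two.
+(See the "-x" and "-y" options, below, for advice on picking values that also
+avoid image artifacts.)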
+.SH Introduction to UTM
+.PP
+In order to find your way around the data, it is useful to know something about the UTM
+system, which is an international military standard that divides the world into 60 zones
+(like panels on a beach ball), each of which is
+6 degrees of longitude in width, and runs from 80 S to 84 N.
+A UTM projection, of a given zone, has a central meridian bisecting the map from top to bottom,
+which serves as a reference from which the locations of other features are derived.
+Zone 1 runs from 180W to 174W, with its central meridian at 177W.  Successive zones run to
+the east, with zone 2 beginning at 174W.
+.PP
+In the UTM system, the location of a feature is specified by its distance to the north of the equator, in
+meters, and its distance eastward from the central meridian, in meters plus 500,000.
+In the southern hemisphere, 10,000,000 is added to the distance north from the equator.
+(The purpose of the 500,000 and 10,000,000 offsets is to avoid having any negative distances.)
+.I Drawmap
+internally converts UTM distances into latitude/longitude coordinates before plotting features
+on a map.
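+.PP
+As an illustration of the zone arithmetic (this is only a sketch for the
+reader's benefit, not code taken from
+.I drawmap
+or from the conversion programs described below):
+.nf
+
+	#include <math.h>
+
+	/* UTM zone number for a longitude given in decimal degrees, west
+	 * negative, ignoring the irregular zone boundaries used around
+	 * Norway and Svalbard. */
+	int utm_zone(double longitude)
+	{
+		int zone = (int)floor((longitude + 180.0) / 6.0) + 1;
+		if (zone > 60)
+			zone = 1;	/* a longitude of exactly 180 east wraps around */
+		return zone;
+	}
+
+	/* Longitude of the central meridian of a zone.  Eastings are measured
+	 * from this meridian, plus the 500,000-meter offset; southern-hemisphere
+	 * northings add the 10,000,000-meter offset. */
+	double central_meridian(int zone)
+	{
+		return (double)(zone * 6 - 183);	/* zone 1 gives -177, i.e. 177W */
+	}
+
+.fi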
+.PP
+Included with
+.I drawmap
+are two programs,
+.I utm2ll
+and
+.I ll2utm,
+that you can use to convert back and forth between UTM coordinates and latitude/longitude coordinates.
+You don't really need these to use
+.I drawmap,
+but they are useful in their own right.
+(Be sure to read the associated manual pages to get information on conversion accuracy.)
+.PP
+The result of the cylindrical projection is to map each one-degree by one-degree latitude/longitude
+patch (on the curved surface of the Earth) into a rectangular area (on the map projection).
+In the process, of course, the projection distorts shapes and areas
+as it stretches the beach-ball panels into flat rectangles;
+and these deviations get larger as the distances from the central meridian and equator increase.
+.PP
+Distortion may also occur due to the way the latitude lines are projected.
+In the classical Mercator projection, for example, the latitude lines are spaced farther and
+farther apart as they near the poles (reaching infinity at the poles themselves).
+This gives the map some useful directional properties,
+but grossly distorts shapes and areas near the poles.  (You can approximate this kind of stretching with
+.I drawmap
+by using the "-x" and "-y" options to vary the number of pixels per longitudinal or latitudinal degree;
+but remember that, within a
+.I drawmap
+map, latitude and longitude always vary linearly.)
+.PP
+It is a fact of life that mapping a sphere onto a flat piece of paper
+is going to produce distorted results.
+Various types of map projections are chosen for the ways they preserve one or more valuable
+features of a globe-shaped map (features like shape, area, distance, and direction).
+In the Transverse Mercator projection,
+the distortions are reasonable for points that are within several degrees of the central meridian,
+and for maps that aren't too near the poles.
+In fact, the Mercator projections are "conformal" in the mathematical
+sense, meaning that they preserve angles (and hence shapes) within small areas of the resulting map.
+The classical Mercator projection also has the useful property that a course of constant
+compass bearing plots as a straight line.  (In other words, if the constant-bearing course from Newark to Peoria is X degrees
+east-of-north on the globe, then the straight line from Newark to Peoria on a classical Mercator map is also at an
+angle of X degrees east-of-north.)  Of course, over large areas, the classical Mercator projection can
+grossly distort shapes and areas.
+.PP
+Since, with
+.I drawmap,
+you can define your own image boundaries, the output map may span any portion of one
+or more UTM zones, and zero or more central meridians may appear at arbitrary positions within the
+map boundaries.
+Over small areas, stretching latitude/longitude angles into a square grid (which is what
+.I drawmap
+does) produces roughly the same map image as a square grid of UTM coordinates would.
+Try to keep the map area smaller than a UTM zone, and center the map on a central
+meridian, if you want to use the map as a UTM surrogate.
+UTM coordinates are usually used for areas much smaller than a UTM zone, such
+as a 7.5-minute USGS quad.  For such small areas, the geometrical difference between
+latitude/longitude angles and surface distances is small.
+.SH Introduction to the Different File Types
+.PP
+At the time this manual page was updated (August, 2001), various DEM, DLG, and GNIS files
+were available for free download
+by following appropriate links from http://mapping.usgs.gov/.
+For some files, convenient graphical interfaces were available to let
+you locate desired files by clicking on a map.
+Some DEM, DLG, and GNIS files, and the GTOPO30 files too, were available via FTP
+from edcftp.cr.usgs.gov, in the pub/data directory.
+(In the case of GNIS files, there was simply a pointer to another download site.)
+Access to the various files changes over time, so you may have to do some searching to
+find what you want.
+.PP
+Ordinary DEM and DLG files (that is, non-SDTS and non-GTOPO30 DEM and DLG files)
+are in (gzip-compressed) ASCII text format,
+and are human readable (when uncompressed)
+except that they generally don't
+contain linefeeds to structure them into easily-editable lines of text.
+(Some newer DLG files do have linefeeds; and I have come across some DEM files with
+linefeeds also.)
+The web site provides information on how to add linefeeds and view the file contents, but
+.I drawmap
+is able to read and use the files in their native state (in
+.I gzip
+format, with a ".gz" suffix on the file name).
+.I Drawmap
+can also process the files in uncompressed form.  It is okay to have linefeeds in ordinary DLG files, as
+long as no line is longer than 80 bytes (including the linefeed); and
+it is okay to have linefeeds in ordinary DEM files, as long as no line is longer than 1024 bytes (including
+the linefeed).
+The
+.I drawmap
+distribution contains the
+.I unblock_dlg
+and
+.I unblock_dem
+programs to add appropriate linefeeds
+to DLG and DEM files but, beyond that, you are on your own if you want to muck around
+inside the files.
+.PP
+In general, you can add or remove records to or from a DLG or GNIS file, as long as you
+don't violate the record structure.  For example, I have added linefeeds to a DLG file (using the
+.I unblock_dlg
+program), deleted a record, added a record, and then used
+.I drawmap
+to process the file.
+If you want to do this sort of thing, then you may also want to get copies of the various
+guides and standards for the different kinds of files.  These documents are available
+through the web sites.
+.PP
+Using SDTS files is a bit more complicated.  SDTS data generally come in the form of
+.I tar
+archives, compressed with
+.I gzip.
+Each such archive should be unpacked into a separate directory.  This is
+true even if there are several archives in a single directory on the download
+site.  (Transportation archives, for example, normally come in triples --- one
+each for roads, railroads, and other transportation features.  These triple archives
+should be unpacked into three different directories to avoid files from one archive
+overwriting files from another.)
+.P
+When you provide SDTS files as input to
+.I drawmap,
+you don't have to include all of the unpacked files on the command line.
+For DEM files, each archive should contain one or more files with names like
+????CEL@.DDF, where the '?' symbol stands for any single character, and the '@'
+symbol stands for any single digit.
+Use one or more of these file names (each preceded by "-d")
+just as you would an old-style DEM file name, and
+.I drawmap
+will figure out the names of the other files in the archive.
+.PP
+For DLG files, each archive should contain one or more files with names like
+????LE@@.DDF.  Use one or more of these file names just as you would an optional-format
+DLG file name.
+There is also a Master Data Dictionary available for each kind of DLG file.
+At present,
+.I drawmap
+makes no use of these.
+.PP
+Once you have unpacked the archives, you can compress the individual files
+with
+.I gzip
+if you wish.  If you do compress them, compress every file that has a ".DDF" extension.
+You can also change the file names to all lower case, but don't
+mix and match upper and lower case files.
+Other than changing upper to lower case, DO NOT change the file names.
+.I Drawmap
+uses the file names to deduce what to do.
+.PP
+The GTOPO30 files also come in archives, and must be unpacked
+before use.  (You don't need to unpack each archive into a separate directory,
+but it isn't a bad idea.)  Once they are unpacked, you can compress the individual files
+if you wish, as long as you compress both the ".HDR" file and the ".DEM" file,
+which are the only files that
+.I drawmap
+uses.  (The same guidelines apply as for SDTS files:  try to be consistent
+with upper/lower case, compression, and the like.)
+.P
+There is one GTOPO30 archive that contains a Polar Stereographic projection of Antarctica.
+.I Drawmap
+can't handle that one.
+On the FTP site, there is also a gtopo30hydro directory.  The files in this directory are
+derived from GTOPO30 data, but use a Lambert Azimuthal Equal Area projection.
+.I Drawmap
+does not currently handle these either.
+.PP
+To use GTOPO30 files, simply invoke the "-d" option, and provide as a parameter the file
+whose name ends in ".HDR" (or ".HDR.gz" if you compressed the individual files).
+Use caution with GTOPO30 data.  Each data set spans a large area,
+and the memory needed to read it all in can be enormous.  You can limit the
+amount of memory required by using the "-l" option to restrict the range of the
+image to a subset of the available GTOPO30 data.
+.PP
+Be careful with downloads.  Some download software will uncompress gzip files during a download but still store the
+files with a ".gz" suffix.  Other download software will leave the data compressed, but remove
+the ".gz" suffix.
+.I Drawmap
+will become confused when this happens.  It relies on the suffix to determine the file type.
+.SH Drawmap Tidbits
+.PP
+If you provide all three types of data (DEM, DLG, and GNIS) as input, then
+.I drawmap
+will first produce a shaded relief map (or, when "-c" or "-C" is specified, a contour map),
+and then overlay it with data from the DLG files
+(with the data from each DLG file, in succession, being overlaid on all previous data), and
+then overlay everything with place names from the GNIS file.
+If you omit the DEM data, then the shaded relief (or contouring) is replaced by a simple white background.
+.PP
+.I Drawmap
+will take whatever information you provide and assemble a map containing
+just that information.
+If you provide information that falls outside of your specified map boundaries,
+it is simply ignored.
+If you supply any DEM data, and if you don't specify a contour map (via the "-c" or "-C" option),
+and if there is room, a color key will be placed at the
+bottom of the map to help you interpret the shaded relief.
+If you specify the "-c" or "-C" option, then a message about the contour interval will appear
+at the bottom of the map, if there is room.
+.PP
+Also, if there is room, a title will be placed at the top, containing the lowest and highest
+values of longitude and latitude for this map, and containing the latitude, longitude, and elevation of
+the points on the map of lowest and highest elevation.
+(Actually, of course, there may be multiple points on the map that attain
+the lowest or highest elevation, but
+.I drawmap
+shows only the first ones that it finds.
+Furthermore, for low-resolution output images that have small
+x and y pixel dimensions relative to the granularity of the available DEM data,
+.I drawmap
+may be a little sloppy about the exact latitude and longitude,
+and about the exact maximum and minimum elevations.)
+If only one DEM file is supplied, the location name from the DEM file header will be included in
+the title.
+(Sometimes, it is hard to figure out exactly what the correct name is, so don't be
+surprised if the title looks a bit strange.)
+.PP
+Latitude and longitude tick marks will be placed around the map boundaries, with one
+tick every tenth of a degree.  Tick marks at full degrees and half degrees will be larger
+and (if there is room) will have text next to them that specifies the latitude/longitude.
+Tick marks can be turned off with the "-t" option.
+.PP
+North is always at the top of the map, and east is always at the right.
+.SH OPTIONS
+.TP
+.B \-l latitude_low,longitude_low,latitude_high,longitude_high
+You usually must provide latitude and longitude coordinates that define two diagonal corners
+of the image.  They must be separated by a comma or other non-space character
+(as in:  -l 34.3,-109,35.9,-109.713),
+and they must be in decimal degrees.
+Note that east longitude is positive and west longitude is negative.
+Similarly, north latitude is positive and south latitude is negative.
+If you only provide one "-d dem_file" option, then you can omit the "-l", and
+the corners of the single DEM file will be used to define the map boundaries.
+This is useful when you are simply trying to figure out what area a given DEM
+file covers.
+.TP
+.B \-L
+Print out the program license information and exit.
+.TP
+.B \-o output_file.sun
+You may provide an output file name.  It can be any name that you choose.
+By convention, SUN rasterfile images have a ".sun" file name extension,
+but you can omit it if you wish.
+If you provide no name, then "drawmap.sun" is used.
+(If you use the "-h" option, and provide no name, then "drawmap.pgm" is used.)
+.TP
+.B \-d dem_file
+You can provide as many DEM files as you want.  (There is a hard-coded limit of
+1000 files in the source code, but it is easily changed.)  Since each file covers a limited
+area, it can take quite a few to cover the image if you specify a
+large latitude/longitude range for the image boundaries.
+
+You don't, of course, have to provide enough files to cover the whole
+map area.  Areas not covered by a DEM file will simply have a white background.
+If you have selected the "-c" or "-C" option, there will be anomalous contour lines along
+the edges of these white areas.  If you are using 24K DEM data, there may also
+be anomalous contour lines around the outer boundaries of the map.
+
+The DEM files will be processed into multicolored shaded relief (or contour lines), serving
+as a background for any other features you add to the map.
+If you are trying to draw a contour map using hypsographic data from DLG files
+(as opposed to drawing a contour map using the "-c" or "-C" option, along with data from DEM files),
+then you probably don't want to provide any DEM files.  The DEM data would
+mix with the DLG contour lines to produce a confusing morass.
+
+Note that files are processed in the order given.  Thus, if you want to provide
+a 250K DEM file, and overlay parts of it with one or more 24K DEM files,
+then you want to have the 250K DEM file first in the argument list.
+This overlays the high-resolution data over top of the low-resolution data.
+Furthermore, the decision of whether or not to smooth the final image is
+made based on the last DEM file processed.  It is usually desirable to
+base this decision on the highest-resolution data present.
+.TP
+.B \-c contour_interval_in_meters
+This option has no effect unless you provide one or more DEM files.
+The DEM files are normally processed into multicolored shaded relief.
+If you include the "-c" option, then the shaded relief is replaced by
+a set of contour lines (orange lines on a white background)
+that represent elevations separated by the
+given contour interval (in meters).
+Note that it is also possible to generate contour lines by using
+data in hypsographic DLG files, making the "-c" option seem somewhat redundant.
+However, at the present time, the area covered by the available DEM files
+is a superset of the area covered by hypsographic DLG files.
+Furthermore, the "-c" option allows finer control over the spacing of contour
+lines than is available with hypsographic DLG data.  On the other hand,
+the DLG data is likely to be more precise about the locations of contours.
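+
+As a way of picturing what the contour interval means (a simplified sketch,
+not necessarily the algorithm that
+.I drawmap
+actually uses), imagine assigning every elevation sample to a band:
+.nf
+
+	#include <math.h>
+
+	/* Which contour band does an elevation fall into? */
+	long contour_band(double elevation_meters, double interval_meters)
+	{
+		return (long)floor(elevation_meters / interval_meters);
+	}
+
+.fi
+A contour line then runs wherever two adjacent samples fall into different
+bands; with "-c 100", for example, a line separates ground below 500 meters
+from ground at 500 meters or above.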
+.TP
+.B \-C contour_interval_in_meters
+This option is exactly the same as the "-c" option, except that it doesn't
+use a white background.  Instead, it fills in the areas between the orange
+contour lines using a rotating set of solid colors.  These distinct colors
+make it easier to follow elevation contours as they swirl around the map.
+(The colors come from the same set used to generate shaded relief, except
+that white is excluded because it tends to stand out too much from the
+other colors.)
+.TP
+.B \-g gnis_file
+Only one GNIS file is allowed.  This is not really a restriction since you
+can edit these files with an ordinary text editor, making them contain
+whatever place names you want to include.
+In fact, it is normally necessary to winnow out much of the available GNIS data;
+otherwise the map would be plastered nearly solid with place names.
+
+The GNIS data generally come in separate files, one for each US state.
+Files can be in one of two different formats:  a fixed-field-width format in which
+fields are padded out with white space, and a tokenized format in which
+the fields are separated by the delimiter "','".
+You can mix together records from both formats in your customized GNIS file.
+
+WARNING:  The format of both kinds of GNIS files has changed; and
+.I drawmap
+will not properly process the older files.  If place names don't
+show up on your maps, then you may need to download newer GNIS files.
+The newer files have records that begin with a postal code, like NJ, NY,
+or WY.
+
+The
+.I llsearch
+program (included in the
+.I drawmap
+package) allows you to extract all place names within a certain range
+of latitudes and longitudes.
+You can manually edit the resulting extracted data and make further reductions.
+Each GNIS entry has a field that denotes its type, such as "ppl" for
+a populated place and "summit" for a mountain top.  These fields can help
+you to narrow down your choices.
+
+The place names are added to the image on top of any other features that
+you choose to include.  A small "+" sign denotes the actual location of
+the feature.
+.TP
+.B \-a attribute_file
+There are three high-level types of objects in a DLG file:  Nodes (points where lines
+join), Areas, and Lines.
+These objects often have attribute codes associated with them.
+Each attribute code consists of a major code and a minor code.
+The major code denotes a particular general type of feature, such as 50 for hydrographic features.
+The minor code denotes a subtype, such as 412 for a stream, or 421 for a lake or pond.
+
+You can provide an attribute file to control what DLG information is
+included in the image.
+Each line in the file consists of a letter 'N', 'A', or 'L' (for Node, Area, or Line), followed by
+a pair of numbers to denote the major and minor codes, followed by any comments you choose to add.
+The fields should be separated by white space.
+Lines that begin with '#', or white space, are ignored.
+
+A negative number, for either the major or minor code, matches anything.  Thus, an attribute specification of
+"L -1 -1" will draw all lines in the DLG files, whether they have associated attribute codes or not.
+(Omitting the attribute file, or providing the "L -1 -1" attribute specification,
+ensures that every possible line is drawn, except for the "neatlines" that form a rectangle
+around the boundaries of the data from each DLG file.)
+If only the minor code is negative, then all lines of a given major type are drawn.
+(For example, an attribute specification of "L 050 -1" will match all hydrographic features.)
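+
+For illustration, a small attribute file (using only codes already mentioned
+above; check the "attrib_codes" file for others) might look like this:
+.nf
+
+# sample attribute file
+L -1   -1     draw every line, whatever its attribute codes
+A 050 421     also fill in lakes and ponds
+
+.fi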
+
+At present,
+.I drawmap
+makes no use of Node data from the DLG file(s).
+Thus, there is no need for any "N" entries in the attribute file.
+
+If no attribute file is given,
+.I drawmap
+will ignore the Area data from the DLG file.
+If Area attributes are specified in an attribute file, then
+.I drawmap
+will attempt to fill the specified types of areas with the same color as the boundary lines that surround them.
+
+The chief use for this is to fill in lakes, reservoirs, and the like.
+However, because the area-filling algorithm is currently not very robust,
+and because the Area data in the DLG file can be somewhat ambiguous, it
+is possible for the outside of an area to be filled in instead of the inside.
+(I have had this happen often in practice, especially when stretching a map in one direction by
+specifying unusual map dimensions with the "-x" and "-y" options.)
+This potential problem is the reason why areas are not filled in unless you make an explicit
+request in an attribute file.
+
+Another common problem is that sometimes lakes or rivers will be only partially filled in.
+The reasons for this are beyond the scope of this manual page, but are discussed in
+comments in the
+.I drawmap
+source code.
+One solution to both of these problems is to not have
+.I drawmap
+fill any areas.  Instead, fill in the areas yourself using an image editor.
+
+The distribution for
+.I drawmap
+includes a file, called "attrib_codes," which is pulled from a USGS
+guide, and describes various major and minor codes.  The distribution also
+contains a sample attribute file, called "attributes."
+The sample attribute file contains Area attribute specifications that will
+cause lakes, ponds, streams, and reservoirs to be filled in.
+(Both of these files are probably somewhat dated.  More current information can
+be obtained by downloading the appropriate standards documents from the
+USGS.)
+
+Precious little error checking is done on the data in the attribute file, so be
+careful.
+
+There is a debugging feature associated with the attribute file.
+If you specify a major code of 10000, and a minor code of your choosing,
+then the minor code is taken to be a specific node, area, or line identifier.
+(Within each node, area, or line record in a DLG file, the first integer
+in the record is an identifier for the node, area, or line.  In general
+the nodes, areas, and lines are numbered sequentially,
+starting at 1.)
+
+By specifying Area or Line attributes with major codes set to 10000,
+you can draw individual areas or lines from a DLG file.
+This can be useful when you are trying to fine-tune a map or find the
+source of some problem.
+When using this feature, it is probably not a good idea
+to include more than one DLG file in the input arguments.
+This is because the Node, Area, and Line identifiers are unique within
+individual files but are re-used from file to file.
+Thus, if you specify multiple DLG files, you may have a hard time figuring out
+which file is the source of each area or line on the output map.
+
+Roads and trails show up in red, pipelines and railroads in black,
+hydrographic features in blue, hypsographic data in orange,
+boundaries in gray, vegetative features in green, and other data in black.
+.TP
+.B \-x x_size and \-y y_size
+The horizontal and vertical dimensions of the map, in picture elements (pixels),
+can be specified via the x and y options.
+You can supply either or both of them.  If you don't provide them, they will be
+selected so that 250K DEM data can be displayed at one half of full resolution.
+
+As a special case, if you give only a single DEM file, and don't use the 
+"-l", "-x", or "-y" options,
+.I drawmap
+will automatically produce a complete map at full resolution.
+
+For most 250K DEM files, full resolution is 1200 pixels per degree of longitude or latitude,
+but it is 400 or 600 pixels per degree of longitude for Alaskan files.  The full resolution of
+GTOPO30 files is 120 pixels per degree.
+For 24K DEM files, resolution is more complicated.
+The data in these files are sampled on a uniform UTM grid instead of on a
+latitude/longitude grid, and the elevations may be sampled at spacings of
+10 or 30 meters, and the number of samples varies with latitude.
+Thus, the resolution (in terms of latitude and longitude) can vary considerably from
+file to file.  I use 3600 pixels per degree as a very rough rule of thumb.
+It is a reasonable approximation for files (with 30-meter spacing) in equatorial
+regions, but becomes considerably less accurate as one moves north or south.
+I like this particular number because it is exactly equal to the number
+of arc-seconds in a degree.
+
+It is generally desirable to specify small x and y values, when you are first trying to fine
+tune your map, because (at full resolution) even a single one-degree block covers a 1200 by 1200 image,
+which is larger than many display screens.
+
+Note that the x and y values define the boundaries of the actual map area, but
+do not define the size of the output image.
+.I Drawmap
+also adds a white border around the image, which makes the output image
+a bit larger than the x and y values would otherwise imply.
+
+Note also that it is best to choose x and y values that are some integer multiple
+or sub-multiple of the resolution of the DEM data.  For 250K DEM data, the
+full-resolution x and y values are 1200 times the width and height of the image
+in degrees of longitude and latitude.  For example, if the image is to cover an area that is 0.1
+degree square, then the automatically-chosen value for x and y is 60, and full
+resolution would require x and y to be set to 120.
+If you want to specify your own dimensions with "-x" and "-y", then it is best to choose
+an integer multiple or sub-multiple of the full resolution of 120.
+Choices, in this case, might include 30, 120, 240, and so on.
+If you choose strange values for x and y, then the program may produce shaded
+relief that contains odd-looking linear artifacts.
+If you aren't providing DEM data, then you don't need to worry about this constraint.
+
+Similar comments apply to DEM files for Alaska, except that full resolution
+is 400 or 600 pixels per degree of longitude.
+GTOPO30 files are also similar to 250K DEM files, but their full resolution is
+ten times smaller.
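+For example, the western-Europe map in the EXAMPLES section below spans
+30 degrees of longitude by 40 degrees of latitude, so full GTOPO30 resolution
+would be 3600 by 4800 pixels; the "-x900 -y1200" values used there are exactly
+one quarter of full resolution in each direction.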
+
+The situation for 24K DEM files is more complicated, since they aren't perfectly
+rectangular.  You may have to try a few different "-x" and "-y" values until
+you get good results.  One starting point is to provide a single 24K DEM file,
+without using the "-l", "-x", or "-y" options.
+.I Drawmap
+will display the image in full resolution, and will tell you what x and y values
+it picks.  (Alternatively, you can use the "-i" option to print out some
+information about the DEM file, including its extent in both the x and y
+directions.)  You can use this information to derive approximate x and y values
+for maps that contain multiple 24K DEM files.  However, because of the odd
+shapes of the 24K quads, you may still have to "twiddle" your derived values
+for the best results.
+
+Be careful about choosing x and y values that are near, but not
+equal to, the full resolution of the data.  Under these conditions,
+.I drawmap
+has a hard time
+transferring the data to the image without creating some image blemishes.
+As an example, if the DEM data has
+a resolution of 1200 elevations per degree, then an x or y value of
+1190 or 1210 would not be the best choice.  These values for x or y would be
+likely to result in a checkerboard effect in areas where the elevation changes
+slowly.
+
+Note that, when the resolution of the source data doesn't match the resolution
+of the desired image,
+.I drawmap
+may silently apply a filter to the source data, or to the output image, to blur
+things out somewhat.  This can improve the appearance of the completed map.
+When the resolutions are about the same, no filtering is done, because I
+prefer isolated image blemishes to non-localized blurring of the map.
+.TP
+.B \-w
+The DLG files describe bodies of water within land areas.  However, they don't
+generally provide polygonal areas to define sea-level water in coastal areas.
+When you use the "-w" option,
+.I drawmap
+will attempt to make ocean areas bright blue, just like the inland waterways.
+This feature is provided as an option, rather than as the default, because it
+sometimes produces odd results.  For example, some DEM data in the
+Sacramento, CA, area give elevations below sea level.  With the "-w" option,
+the map ends up with anomalous-looking sub-sea areas surrounded by water.
+(This representation may, in fact, be correct.  The areas may be polders,
+pumped out for farming purposes.  I don't know.  But they look odd.)
+.TP
+.B \-n color_table_number
+.I Drawmap
+provides a choice of four color schemes for shaded relief.
+The default is color table 2, which provides a natural-looking color scheme.
+Using the "-n" option, you can instead choose color table 1, a very-neutral non-obtrusive
+scheme; color table 3, a natural-looking but garish scheme reminiscent of
+maps in school textbooks; or color table 4, a very garish scheme designed to provide good
+perception of variations in elevation.
+From 1 to 4, the tables are ranked in order of increasing obtrusiveness.
+
+Note that the natural scheme isn't perfect.  What looks natural for seacoast plains and
+mountains may not look as good for highland plains and mountains.  The selected
+color scheme is a compromise.  If you are adventurous, you can modify the software
+to provide additional color tables of your own devising.  The software is
+specifically designed to make such modifications reasonably painless, as long as you are
+familiar with the 'C' programming language.
+
+For elevations below sea level, drawmap simply re-uses one of the colors
+used above sea level.  A grayish or blueish color is selected, if possible.
+The reason for the re-use is that sub-sea-level areas are rare, and
+color table space is a scarce commodity.
+.TP
+.B \-r relief_factor
+Normally, when drawing shaded relief,
+.I drawmap
+manipulates the colors in the color table so as to
+provide maximum sharpness in the relief.
+In other words, the shading varies from full brightness all
+the way to black.
+
+You can use the "-r" option to change this behavior.  A relief_factor
+of 1.0 duplicates the default behavior, and is the same as providing
+no "-r" option at all.  A relief_factor of 0.5 causes the colors to
+shade from full brightness down to half brightness.  A relief_factor
+of 0.0 yields full-brightness color bands, with no shaded relief at all.
+(The "-r" option has no effect when you are requesting contours with
+the "-c" or "-C" option.)
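+
+A purely illustrative way to think of it (this is not the shading code that
+.I drawmap
+actually uses) is as a floor on how dark the shading can get:
+.nf
+
+	/* "shade" runs from 0.0 for a fully lit slope to 1.0 for a fully
+	 * shadowed one; the relief factor limits how much it can darken
+	 * the base color. */
+	double shaded_brightness(double brightness, double shade, double relief_factor)
+	{
+		return brightness * (1.0 - relief_factor * shade);
+	}
+
+.fi
+This reproduces the 1.0, 0.5, and 0.0 behavior described above.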
+
+You can provide any relief_factor you want, as long as it falls in the range
+from 0.0 through 1.0.
+However, keep in mind that the color tables are not infinitely adjustable.
+As you vary the relief_factor from 0 through 1, the color scheme will
+change, at most, eighteen times.  Thus, it is pointless to provide lots of
+digits of precision in the relief_factor.
+
+One use of this feature is to provide faint shaded relief, as a background
+for data you consider more important (such as roads on a road map).
+For this application, you might choose color table 1 or 2, with a relief_factor of 0.1.
+.TP
+.B \-m relief_magnification
+Some regions of the world are relatively flat, with only minor relief.
+In such regions, it may be desirable to make the relief stand out more
+sharply.  The "-m" option allows you to supply a magnification factor
+that enhances elevation differences.  The factor must be greater than
+or equal to 1.0, and the default value is 1.0.
+(The "-m" option has no effect when you are requesting contours with
+the "-c" or "-C" option.)
+
+In order to use this feature, it is useful to know a little about how
+shaded relief is generated.  We begin by assuming that the sun is
+shining from the northwest, so that slopes facing to the north or west
+will be more brightly lit than slopes facing south or east.
+At any given point on the map, we first note the exact elevation of the point.
+This information is used to select the overall color at that point,
+such as green, yellow, red, brown, and so on.
+We then find the difference in elevation between
+the given point and a couple of nearby points.
+These results are used to make a
+rough estimate of the direction the land is sloping at the given point.
+This estimate is then used to modulate the light/dark shading of the
+point to reflect the degree to which the point is in sun or in shade.
+The actual degree of light/dark shading is the result of a hand-tuned
+algorithm, developed largely through trial and error.
+
+When you provide a magnification factor, the height differences
+(between the given point and its neighbors) are multiplied by the given factor.
+Thus, if a given height difference is Z meters, and the magnification factor
+is 2, the shading is done as if the height difference were 2Z meters.
+This may result in a somewhat brighter highlight, or a somewhat deeper shadow,
+or no noticeable change, depending on the direction that the land is sloping.
+Note that the actual elevation of the point is not modified.
+Thus, if the elevation
+calls for the point to be green, it will remain green no matter how
+large a magnification factor you provide.  The only impact of the "-m"
+option is to modify the light/dark shading at each point.
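+
+A rough sketch of the idea (again, only an illustration, not the hand-tuned
+algorithm that
+.I drawmap
+actually uses):
+.nf
+
+	/* Estimate how strongly a point faces the northwest "sun" by comparing
+	 * it with its neighbors to the north and west.  A positive result means
+	 * the ground falls away to the north and west, so the point is lit;
+	 * a negative result means it is in shadow.  The "-m" factor simply
+	 * scales the height differences before the shading is computed. */
+	double slope_term(double elev_here, double elev_north, double elev_west,
+	                  double magnification)
+	{
+		double dz = ((elev_here - elev_north) + (elev_here - elev_west)) / 2.0;
+		return magnification * dz;
+	}
+
+.fi
+The result would then be mapped, nonlinearly, onto a brighter or darker
+version of the color chosen from the point's own elevation.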
+
+Don't expect amazing results.  The shading calculations are not linearly
+related to height differences, so the magnification factor has
+only a limited effect.
+To maximize perception of height differences,
+you might want to try the "-z" option, with or without the "-m" option.
+Remember too that
+.I drawmap's
+shading is only a crude simulation of the light and shadow of real relief.
+If you want more realistic shading, you might want to use the "-h" option
+to generate a file of elevation data, which can be imported into a ray-tracing
+program to produce a more realistic three-dimensional appearance.
+.TP
+.B \-z
+When the given DEM data span a small range of elevations, shaded relief
+uses only a small portion of the color table.
+In fact, if the range of elevations is small enough, the entire
+map may end up using only a single color, with whatever
+light/dark shading is called for by the limited roughness of the terrain.
+This results in a pretty boring map.
+
+For these situations,
+.I drawmap
+provides the "-z" option, which specifies that the entire range of
+available colors be used to represent the given terrain.
+For example, assume that the data only contain elevations between
+4567 feet and 5799 feet.  Normally (depending on the chosen color table),
+the color "green" might represent elevations from 0 feet to 1000 feet,
+and thus no green would appear in the map.
+With the "-z" option, however, green will instead represent
+elevations from 4567 feet to 4655 feet, and will show up in the low-lying
+areas of the map.  All of the other available colors will also show up,
+each representing its proportion of the elevation range.
+(The "-z" option has no effect when you are requesting contours with
+the "-c" or "-C" option.)
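+
+In other words (a simplified sketch, not
+.I drawmap's
+actual color-table handling), the color band is chosen relative to the
+elevations actually present, rather than relative to a fixed overall range:
+.nf
+
+	/* Spread num_bands color bands evenly over the observed elevations;
+	 * the real number of bands depends on the chosen color table. */
+	long color_band(double elev, double min_elev, double max_elev, long num_bands)
+	{
+		return (long)((elev - min_elev) * (double)num_bands
+				/ (max_elev - min_elev + 1.0));
+	}
+
+.fi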
+.TP
+.B \-i
+When you provide this option, drawmap doesn't produce a map.
+Instead it prints out some useful information about all of the DEM and DLG
+files that you specify on the command line.
+
+For a DEM file, the information includes:  the file name, the DEM name, the latitude/longitude
+of the southeast and northwest corners, the minimum and maximum elevation, the number of samples
+in the x and y directions, and an indication of whether or not the file contains linefeeds.
+
+For a DLG file, the information includes:  the file name, the DLG name, the postal codes touched
+by the file (e.g. MT, TX, RI), the type of data present in the file,
+the latitude/longitude of the southeast and northwest corners,
+and an indication of whether or not the file contains linefeeds.
+
+.I Drawmap
+may not always be able to find the postal codes in a DLG file, so don't
+be upset if the field is blank.
+In DEM files, the DEM name may contain some postal-code information, but not always.
+SDTS files aren't human-readable, so their linefeed information is omitted.
+
+When the "-i" option appears, all other options are ignored except the "-d" option.
+.TP
+.B \-h
+When you provide this option,
+.I drawmap
+doesn't produce a map.
+Instead it takes the DEM information and produces a height-field
+file, in Portable Graymap (PGM) format.  The file is readable ASCII,
+beginning with the line "P2", which identifies the file as a PGM file.
+The next line contains the x and y dimensions, and the maximum
+elevation in the file, separated by white space.
+Then the file includes all of the elevation samples, one per line, beginning
+with the top west-to-east row, and followed by all of the other rows in sequence.
+Finally, there are some commentary lines containing information about the data,
+including the latitude/longitude of the southeast and northwest corners,
+and the minimum and maximum elevations.
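+
+For instance, a height field for a (hypothetical) tiny grid, three samples
+wide and two rows tall, with a maximum elevation of 1450 meters, would begin:
+.nf
+
+	P2
+	3 2 1450
+	1375
+	1402
+	1391
+	1388
+	1420
+	1450
+
+.fi
+with the commentary lines following the samples.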
+
+This file is suitable for use by ray tracing programs
+(such as the readily-available
+.I POV-Ray(tm)
+program) to produce 3-dimensional renderings of terrain.
+(It is also viewable by some image viewers, such as the "xv" viewer, and
+can be used as input to custom-built programs that process elevation data.)
+
+Unless you select a file
+name, with the "-o" option, the file will be called "drawmap.pgm".
+
+Any elevations less than zero are bumped up to zero, and any areas of the
+image that contain no DEM data have their elevations set to zero.
+(In the latter case, the points are not included when determining the
+minimum elevation in the file.  In the former case, the minimum elevation
+will be zero.)
+
+.I Drawmap
+will also generate a file called "drawmap.pov".  This file is a rough
+attempt at a
+.I POV-Ray
+(version 3) file which, together with the PGM file,
+can be used to produce a rendering of the 3-dimensional terrain.
+The file will probably require manual
+editing to get things the way you want them, but it is at least a start.
+There are some minimal instructions, embedded in the file as comments,
+but you are assumed to be familiar with
+.I POV-Ray
+before you use the "-h" option.
+.TP
+.B \-t
+Normally,
+.I drawmap
+will put tick marks and latitude/longitude numbers around the borders of the map.
+However, for maps that span large regions of the earth, these tick marks and numbers
+can overlap and interfere with one another.
+.I Drawmap
+makes a limited attempt (with emphasis on the word "limited")
+to reduce the density of the markings as the map area increases.
+Rather than try to adapt to any situation, though, I chose to provide
+the "-t" option, which totally shuts off production of tick marks and latitude/longitude
+legends.  It is for use in situations where the border markings become cumbersome.
+.TP
+.B dlg_file
+Any argument that doesn't match any of the above options is assumed to be a DLG file.
+You can add as many as you like.
+Note that files are processed in the order given, and each file is overlaid by the
+ones that come after it.
+Thus, you generally want to put "transportation" files after "hydrography" files,
+so that roads will be shown as crossing over streams instead of the other way
+around.
+.SH EXAMPLES
+To generate a simple shaded relief map for a portion of the southern California coast,
+with the size of the map set to a reduced resolution of 300x300 pixels (full
+resolution would be 1200x1200):
+.PP
+drawmap -d santa_ana-w.gz -l 33,-117,34,-118 -x 300 -y 300
+.PP
+To extract the upper right quadrant of the above map, and display it at full resolution:
+.PP
+drawmap -d santa_ana-w.gz -l 33.5,-117,34,-117.5 -x 600
+        -y 600
+.PP
+To add in some place names from a GNIS file (that you have prepared in advance, using llsearch):
+.PP
+drawmap -g gnis_santa_ana_west -d santa_ana-w.gz
+        -l 33.5,-117,34,-117.5 -x 600 -y 600
+.PP
+To add in some DLG files for hydrography:
+.PP
+drawmap -g gnis_santa_ana_west -d santa_ana-w.gz
+        -l 33.5,-117,34,-117.5 -x 600 -y 600
+        santa_ana-e_CA/hydrography/522274.HY.opt.gz
+        santa_ana-e_CA/hydrography/522275.HY.opt.gz
+        santa_ana-e_CA/hydrography/522276.HY.opt.gz
+        santa_ana-e_CA/hydrography/522279.HY.opt.gz
+.PP
+To draw a map of western Europe, using GTOPO30 data, first
+download the w020n90.tar.gz and w020n40.tar.gz archives,
+and unpack them by typing:
+.PP
+gunzip -c w020n90.tar.gz | tar xf -
+.PP
+gunzip -c w020n40.tar.gz | tar xf -
+.PP
+Then draw a map by typing:
+.PP
+drawmap -t -x900 -y1200 -w -l30,20,70,-10 -d W020N90.HDR
+        -d W020N40.HDR
+.SH LIMITS
+As distributed,
+.I drawmap
+is limited to 1000 DEM files, one GNIS file, and one attribute file.
+The DEM limit is easily changed in the code.
+As explained above, the GNIS limitation is not really a limitation, since you can
+concatenate as many GNIS records as you want into a single file.
+I'm not sure how to implement multiple attribute files, or even what they would
+be used for.
+The number of DLG files is only limited by your system's limits on command-line length.
+.PP
+Another limitation arises from the fact that
+.I drawmap
+must be able to read all of the input data into memory.  If you want to produce
+large maps, then you must have large memory.
+.PP
+When dealing with 24K DEM data, there will often be visible seams
+between the data from different files.  There are several reasons for this.
+First, there can be marked differences in data quality between files.
+Lower quality data can have a lot of anomalous "fuzz", which forms
+discontinuities with adjacent data of higher quality.
+Even if one ignores other sources of discontinuity between data blocks,
+the visual difference between the two quality levels can be quite obvious.
+.PP
+Second, the calculations used to map raw data into the image
+may produce discontinuities, because numeric rounding
+may pull a data point one way, at the edge of one
+block of data, and may push an adjacent data point the other way,
+at the edge of the adjacent block of data.
+.PP
+Third, it goes without saying that there may be residual bugs in
+the code that handles the DEM files.
+.PP
+Finally, there may be flaws in the data itself.
+For example, some 24K SDTS DEM files, produced before January 2001, are
+known to have small positional errors.  (The non-SDTS DEM files don't
+suffer from this problem.)
+.PP
+Similar seams may appear between blocks of GTOPO30 data, but aren't
+usually as obtrusive.
+.PP
+There is another issue involving 24K DEM data.
+Each 24K quad represents a latitude/longitude square, with
+one eighth of a degree on each side.
+However, the native coordinate system for 24K DEM quads is a UTM grid.
+In UTM coordinates, the latitude/longitude square becomes an approximate quadrilateral,
+which often has no two sides of the same length.  The four sides of the
+quad will usually be tilted at slight angles to the UTM axes.  It is this odd-shaped
+quad that is stored in a 24K DEM file, as a set of elevation samples that are
+evenly-spaced in UTM coordinates.  (The spacing is normally 10 meters or 30 meters.)
+Different columns of sample points may contain different numbers of samples,
+depending on where the columns intersect the slanting sides of the quad.
+.PP
+An evenly-spaced collection of UTM points does not map onto an evenly-spaced
+set of latitude/longitude points.
+In order to map the UTM data onto a latitude/longitude grid,
+.I drawmap
+must warp the points into new relative positions, turning the
+unevenly-shaped UTM quadrilateral into a latitude/longitude square.
+(You might picture this by imagining an odd-shaped quad, which is
+cut out of a rubber sheet and covered with a uniform grid of dots,
+and which is then stretched into a perfect square.)
+During this warping process, rounding quantization can produce some diagonal
+artifacts in the map image.
+(We could sidestep this issue by making
+.I drawmap
+produce maps using an optional UTM grid, rather than always using a
+latitude/longitude grid.  However,
+.I drawmap
+is not presently so endowed.)
+.PP
+Rounding quantization may also sometimes produce anomalous vertical and
+horizontal linear features on the map, in the form of small discontinuities in
+the changing elevation.  This can happen for data of any level of detail;
+and is a result of trying, for example, to stretch a 100-by-100 grid of
+data points to cover a 101-by-101 grid of image pixels.  The data don't
+quite fit, and something, somewhere, has to give.
+.PP
+Whether artifacts are horizontal, vertical, or diagonal, their incidence
+can sometimes be reduced by adjusting the values of the "x" and "y"
+image dimensions.
+However, while this is usually straightforward for 250K DEM or GTOPO30 data,
+it can be a challenge for 24K DEM data.
+This is because, while
+.I drawmap
+can tell you the height and width, in UTM samples, for the raw 24K DEM
+data, it doesn't know how to figure out an optimal width and height
+after the data are warped onto a latitude/longitude grid.
+Furthermore, when you provide more than one 24K DEM file, there is
+no truly optimal width and height for the image, because
+the quadrangle covered by each file has a slightly different shape from
+the quadrangles adjoining it.  What works well for one quad may
+not be the best for another.
+I don't have a general rule of thumb for adjusting the width and height.
+I usually just try a few minor tweaks, to the "x" and "y" values, and pick
+the one I like the best.
+.PP
+Faced with various possible image artifacts,
+.I drawmap
+tries to smooth things out, but faces a tradeoff between making the image
+look good (and blurring some of the good data), or
+leaving the good data unaltered (resulting in some esthetic imperfections).
+.I Drawmap
+tends to err on the side of leaving good data untouched at the expense of
+leaving some artifacts in isolated spots on the image.
+It tries hardest to preserve the pristine data when the map image dimensions
+are approximately the same as the resolution of the raw data.  This is
+because, in such cases, there is approximately a one-to-one mapping between
+the raw data and pixels in the image.  It seems useful to preserve this
+correspondence, and not to blur it with smoothing algorithms.
+.PP
+When you specify image dimensions that differ considerably from the resolution
+of the data,
+.I drawmap
+takes more liberties in its attempt to produce pleasing results.
+If the map resolution is greater than the resolution of the data, then
+.I drawmap
+must replicate data points in order to fill up the map.  It does some smoothing
+on the finished image to reduce the resulting "checkerboard" effect.
+If the data have considerably greater resolution than the map image, then
+.I drawmap
+has more data than it needs, so it
+averages adjacent data points to determine each elevation in the map.
+Thus, one alternative for reducing image artifacts is to produce a map at,
+say, half resolution.
+If, for example, a 24K DEM file has a 300-by-400 sample grid,
+then you might try drawing a map with "x" set to 150 and "y" set to
+200.  The averaging operation will then smooth the data, which
+will usually reduce image artifacts.
+.PP
+If you are using the "-c" or "-C" option, and the given DEM files do not fully cover
+the image, there may be anomalous contour lines along the borders of the valid data.
+(This happens when you fail to completely tile the image with DEM files.)
+This problem is a result of the way the contours are produced.  It may get fixed
+some day, but isn't a high priority since it is usually hard to mistake these
+anomalies for valid data.
+.PP
+The code that reads SDTS files is not a complete implementation of all of the
+relevant standards involved in SDTS.
+In particular, SDTS relies on the ISO 8211 standard, and it would not
+be at all difficult to construct a valid ISO 8211 file that
+.I drawmap
+would be unable to read.
+The code is intended to be smart enough to read SDTS files from the USGS,
+and hopefully from other sources, but it is not necessarily
+smart enough to read any file you might throw at it.
+If you find a USGS SDTS file that
+.I drawmap
+can't read, I would be interested in hearing about it.
+(I can't promise to fix the problem, because the range of possibilities is
+large, and I don't want to end up trying to support every dialect that
+happens to pop up.)
+.PP
+Most of the SDTS DEM files I have examined store elevations as binary
+two's-complement integer numbers.
+Some files, however, store them as binary floating-point numbers,
+in IEEE 754 format.
+When it encounters such a file,
+.I drawmap
+simply assumes that your computer uses IEEE 754 floating
+point as its native floating point format.  If this assumption is
+not true, then the file won't be correctly parsed.
+.PP
+There may be some DLG attribute codes that are not properly handled.
+While I have downloaded and processed thousands of DLG files, in the various
+supported formats, I can't be sure that this subset of the available files
+spans the full range of possibilities.
+Also, it is not always clear, in the relevant specifications documents,
+exactly how attributes should be encoded, in either the old-style DLG files
+or the newer SDTS DLG files.
+I know of at least a few ambiguities that I am not sure how to handle.
+These are documented in the source code.
+Furthermore, there are numerous special cases, some of which appear to involve
+relatively small subsets of the USA.
+I put a lot of effort into trying to properly process the attributes,
+but it is difficult to test every possible situation, and my patience
+for dealing with finicky details is not infinite.
+.SH SEE ALSO
+.I llsearch(1), utm2ll(1), ll2utm(1), unblock_dlg(1), unblock_dem(1), sdts2dem(1), sdts2dlg(1), pgm(1)
+\" =========================================================================
+\" drawmap.1 - The manual page for the drawmap program.
+\" Copyright (c) 1997,1998,1999,2000,2001  Fred M. Erickson
+\"
+\" This program is free software; you can redistribute it and/or modify
+\" it under the terms of the GNU General Public License as published by
+\" the Free Software Foundation; either version 2, or (at your option)
+\" any later version.
+\"
+\" This program is distributed in the hope that it will be useful,
+\" but WITHOUT ANY WARRANTY; without even the implied warranty of
+\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+\" GNU General Public License for more details.
+\"
+\" You should have received a copy of the GNU General Public License
+\" along with this program; if not, write to the Free Software
+\" Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+\" =========================================================================

Added: packages/drawmap/branches/upstream/current/drawmap.c
===================================================================
--- packages/drawmap/branches/upstream/current/drawmap.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/drawmap.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,3225 @@
+/*
+ * =========================================================================
+ * drawmap - A program to draw maps using data from USGS geographic data files.
+ * Copyright (c) 1997,1998,1999,2000,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ *
+ * Program to process 250K Digital Elevation Model (DEM),
+ * 24K Digital Elevation Model,
+ * 100K (optional-format) Digital Line Graph (DLG),
+ * 24K Digital Line Graph,
+ * and Geographic Names Information System (GNIS)
+ * files and produce colored maps in SUN Rasterfile format.
+ *
+ * At the time this program was written, some DEM, DLG, and GNIS files were available
+ * for free download by following appropriate links from http://mapping.usgs.gov/
+ */
+
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <math.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <errno.h>
+#include <time.h>
+#include <string.h>
+#include "drawmap.h"
+#include "raster.h"
+#include "colors.h"
+#include "dem.h"
+#include "font_5x8.h"
+#include "font_6x10.h"
+
+
+
+#define VERSION "Version 2.5"
+
+
+
+/*
+ * data from the header of a dem file
+ */
+struct dem_record_type_a dem_a;
+struct dem_record_type_c dem_c;
+
+long x_prime;
+
+long bottom_border = BOTTOM_BORDER;
+extern long right_border;	// Defined and initialized in dlg.c because needed in programs that don't include drawmap.o
+
+// long histogram[256];	/* For debugging. */
+// long angle_hist[100000];	/* For debugging. */
+// long total;	/* For debugging. */
+
+
+#define CONTOUR_INTVL	(100.0)
+
+long get_factor(double);
+void add_text(struct image_corners *, char *, long, long, long, unsigned char *, long, long, long, long);
+void get_short_array(short **, long, long);
+void gen_texture(long, long, struct color_tab *, char *);
+
+
+void
+usage(char *program_name)
+{
+	fprintf(stderr, "\nDrawmap, %s.\n\n", VERSION);
+	fprintf(stderr, "Usage:  %s [-L]\n", program_name);
+	fprintf(stderr, "          [-o output_file.sun] [-l latitude1,longitude1,latitude2,longitude2]\n", program_name);
+	fprintf(stderr, "          [-d dem_file1 [-d dem_file2 [...]]] [-a attribute_file] [-z] [-w]\n");
+	fprintf(stderr, "          [-c contour_interval] [-C contour_interval] [-g gnis_file] [-t]\n");
+	fprintf(stderr, "          [-x x_size] [-y y_size] [-r relief_factor] [-m relief_mag] [-i] [-h]\n");
+	fprintf(stderr, "          [-n color_table_number] [dlg_file1 [dlg_file2 [...]]]\n");
+	fprintf(stderr, "\nNote that the DLG files are processed in order, and each one overlays the\n");
+	fprintf(stderr, "last.  If you want (for example) roads on top of streams, put the\n");
+	fprintf(stderr, "transportation data after the hydrography data.  Note also that\n");
+	fprintf(stderr, "latitude/longitude values are in decimal degrees, and that east and north\n");
+	fprintf(stderr, "are positive, while west and south are negative.\n");
+	fprintf(stderr, "A contour interval specified with the -c or -C option must be in meters.\n");
+}
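+
+/*
+ * Illustrative example (the file names are hypothetical):  to get roads
+ * drawn on top of streams, list the hydrography DLG before the
+ * transportation DLG, as in
+ *
+ *	drawmap -d block.dem -o map.sun hydrography.dlg transportation.dlg
+ *
+ * since each DLG file is drawn over the ones that precede it.
+ */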
+
+
+
+void
+license(void)
+{
+	fprintf(stderr, "This program is free software; you can redistribute it and/or modify\n");
+	fprintf(stderr, "it under the terms of the GNU General Public License as published by\n");
+	fprintf(stderr, "the Free Software Foundation; either version 2, or (at your option)\n");
+	fprintf(stderr, "any later version.\n\n");
+
+	fprintf(stderr, "This program is distributed in the hope that it will be useful,\n");
+	fprintf(stderr, "but WITHOUT ANY WARRANTY; without even the implied warranty of\n");
+	fprintf(stderr, "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n");
+	fprintf(stderr, "GNU General Public License for more details.\n\n");
+
+	fprintf(stderr, "You should have received a copy of the GNU General Public License\n");
+	fprintf(stderr, "along with this program; if not, write to the Free Software\n");
+	fprintf(stderr, "Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n");
+}
+
+
+
+int
+main(int argc, char *argv[])
+{
+	long i, j, k, l, m, n;
+	long tick_width;
+	double f;
+	long file_index;
+	long xx, yy;
+	double red, green, blue;
+	unsigned char a, b, c, d;
+	long *lptr;
+	long lsize;
+	long smooth[SMOOTH_MAX + SMOOTH_MAX + 1][SMOOTH_MAX + SMOOTH_MAX + 1];
+	long smooth_size;
+	double gradient, gradient1, gradient2, gradient3;
+	double fraction;
+	double latitude;
+	double longitude;
+	long factor;
+	long angle;
+	long sum;
+	long sum_count;
+	struct rasterfile hdr;
+	unsigned char  map[3][256];
+	int dem_fdesc;
+	int gnis_fdesc;
+	int dlg_fdesc;
+	int output_fdesc;
+	ssize_t ret_val;
+	long length;
+	long start_x, start_y;
+	unsigned char buf[DEM_RECORD_LENGTH];
+	char *ptr;
+	unsigned char *tok_ptr;
+	unsigned char *font;
+	long font_width, font_height;
+	time_t time_val;
+	unsigned char dem_name[135];
+	long dem_flag;
+	long contour_flag;
+	long capital_c_flag;
+	long seacoast_flag;
+	long info_flag;
+	long height_field_flag;
+	long color_table_number;
+	long smooth_data_flag;
+	long smooth_image_flag;
+	long z_flag;
+	long tick_flag;
+	double relief_factor;
+	double relief_mag;
+	double y_gp_1, x_gp_1, y_gp_2, x_gp_2;
+	double latitude1, longitude1, latitude2, longitude2;
+	long tmp_width, tmp_height, tmp_x, tmp_y;
+	char *dem_files[NUM_DEM];
+	long num_dem, num_dlg;
+	char *gnis_file;
+	char *attribute_file;
+	char *output_file;
+	long option;
+	long x_low, x_high, y_low, y_high;
+	double res_y, res_xy;
+	short *image_tmp;
+	short *image_in = (short *)0;
+	long gz_flag, lat_flag;
+	double contour_trunc;
+	double contour_intvl = CONTOUR_INTVL;
+	long max_elevation = -100000, min_elevation = 100000;
+	long min_e_lat;
+	long min_e_long;
+	long max_e_lat;
+	long max_e_long;
+	unsigned char *gnis_feature_name;
+	char save_byte;
+	struct image_corners image_corners;
+	struct dem_corners dem_corners;
+	double res_x_data, res_y_data, res_x_image, res_y_image;
+	long c_index_sea;
+	struct color_tab *color_tab;
+	short *sptr, *sptr2, *sptr_down, *tmp_row;
+	short s0, s1, s2;
+	ssize_t (*read_function)();
+	FILE *pgm_stream;
+	struct datum datum =  {
+		/* Fill in the datum parameters for the default program-wide datum:  NAD-27. */
+		NAD27_SEMIMAJOR,
+		NAD27_SEMIMINOR,
+		NAD27_E_SQUARED,
+		NAD27_F_INV,
+		UTM_K0,
+		NAD27_A0,
+		NAD27_A2,
+		NAD27_A4,
+		NAD27_A6,
+	};
+	struct datum dem_datum;	// The datum of a given DEM file
+	long sdts_flag;		// When nonzero, we are processing an SDTS file
+	long gtopo30_flag;	// When nonzero, we are processing a GTOPO30 file
+	long byte_order;
+	double utm_x, utm_y;
+	long utm_zone;
+
+	if (argc == 1)  {
+		usage(argv[0]);
+		exit(0);
+	}
+
+
+	/* Process arguments */
+	image_corners.x = -1;
+	image_corners.y = -1;
+	image_corners.sw_lat = 91.0;
+	image_corners.sw_long = 181.0;
+	image_corners.ne_lat = -91.0;
+	image_corners.ne_long = -181.0;
+	gnis_file = (char *)0;
+	attribute_file = (char *)0;
+	output_file = (char *)0;
+	num_dem = 0;
+	dem_flag = 0;		/* When set to 1, this flag says that at least some DEM data was read in. */
+	contour_flag = 0;	/* When set to 1, this flag says that we should produce contours instead of shaded relief. */
+	capital_c_flag = 0;	/* When set to 1, this flag indicates that the user specified '-C' instead of '-c' */
+	lat_flag = 0;		/* When set to 1, this flag says that either the user explicitly specified the map boundaries, or we took them from the DEM data. */
+	seacoast_flag = 0;	/* When set to 1, drawmap attempts to fill in the sea with B_BLUE */
+	info_flag = 0;		/* When set to 1, drawmap prints out information about DEM and DLG files and does nothing else */
+	z_flag = 0;		/* When set to 1, drawmap adjusts the elevations in the color table so as to use the entire table */
+	tick_flag = 1;		/* When set to 1, tick marks and numeric latitudes/longitudes are added around the map. */
+	height_field_flag = 0;	/* When set to 1, drawmap generates a height-field file instead of an image. */
+	color_table_number = 2;	/* Select default color scheme. */
+	opterr = 0;		/* Shut off automatic unrecognized-argument messages. */
+	relief_factor = -1.0;	/* Valid values are real numbers between 0 and 1, inclusive.  Initialize to invalid value. */
+	relief_mag = 1.0;	/* Valid values are real numbers greater than or equal to 1.  Initialize to default value. */
+
+	while ((option = getopt(argc, argv, "o:d:c:C:g:a:x:y:r:m:l:n:Lwihzt")) != -1)  {
+		switch(option)  {
+		case 'o':
+			if (output_file != (char *)0)  {
+				fprintf(stderr, "More than one output file specified with -o\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No output file specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			output_file = optarg;
+			break;
+		case 'd':
+			if (num_dem >= NUM_DEM)  {
+				fprintf(stderr, "Out of storage for DEM file names (max %d)\n", NUM_DEM);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No DEM file specified with -d\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			dem_files[num_dem++] = optarg;
+			break;
+		case 'C':
+			capital_c_flag = 1;
+		case 'c':
+			if (contour_flag != 0)  {
+				fprintf(stderr, "More than one -c or -C option given\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No contour interval specified with -c\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			contour_intvl = atof(optarg);
+			contour_flag = 1;
+			break;
+		case 'g':
+			if (gnis_file != (char *)0)  {
+				fprintf(stderr, "More than one GNIS file specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No GNIS file specified with -g\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			gnis_file = optarg;
+			break;
+		case 'a':
+			if (attribute_file != (char *)0)  {
+				fprintf(stderr, "More than one attribute file specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No attribute file specified with -a\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			attribute_file = optarg;
+			break;
+		case 'x':
+			if (image_corners.x >= 0)  {
+				fprintf(stderr, "More than one -x value specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No value specified with -x\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			image_corners.x = atoi(optarg);
+			break;
+		case 'y':
+			if (image_corners.y >= 0)  {
+				fprintf(stderr, "More than one -y value specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No value specified with -y\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			image_corners.y = atoi(optarg);
+			break;
+		case 'r':
+			if (relief_factor >= 0.0)  {
+				fprintf(stderr, "More than one -r value specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No value specified with -r\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			relief_factor = atof(optarg);
+			if ((relief_factor < 0.0) || (relief_factor > 1.0))  {
+				fprintf(stderr, "The relief factor given with -r must be a real number between 0 and 1, inclusive.\n");
+				exit(0);
+			}
+			break;
+		case 'm':
+			if (relief_mag != 1.0)  {
+				fprintf(stderr, "More than one -m value specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No value specified with -m\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			relief_mag = atof(optarg);
+			if (relief_mag < 1.0)  {
+				fprintf(stderr, "The relief magnification given with -m must be a real number greater than or equal to 1.\n");
+				exit(0);
+			}
+			break;
+		case 'l':
+			if ((image_corners.sw_lat != 91.0) || (image_corners.sw_long != 181.0) ||
+			    (image_corners.ne_lat != -91.0) || (image_corners.ne_long != -181.0))  {
+				fprintf(stderr, "More than one set of -l values specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No values specified with -l\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			ptr = optarg;
+			if (*ptr != '\0')  {
+				image_corners.sw_lat = strtod(ptr, &ptr);
+			}
+			/* Skip the comma, but don't run off the end of the string. */
+			if (*ptr != '\0')  {
+				ptr++;
+			}
+			if (*ptr != '\0')  {
+				image_corners.sw_long = strtod(ptr, &ptr);
+			}
+			if (*ptr != '\0')  {
+				ptr++;
+			}
+			if (*ptr != '\0')  {
+				image_corners.ne_lat = strtod(ptr, &ptr);
+			}
+			if (*ptr != '\0')  {
+				ptr++;
+			}
+			if (*ptr != '\0')  {
+				image_corners.ne_long = strtod(ptr, &ptr);
+			}
+			if ((image_corners.sw_lat == 91.0) || (image_corners.sw_long == 181.0) ||
+			    (image_corners.ne_lat == -91.0) || (image_corners.ne_long == -181.0))  {
+				fprintf(stderr, "Incomplete set of -l values specified\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			/* Used to check against the limits [-80,84] but GTOPO30 data can fall outside that. */
+			if ((image_corners.sw_lat < -90.0) || (image_corners.sw_lat > 90.0) ||
+			    (image_corners.ne_lat < -90.0) || (image_corners.ne_lat > 90.0))  {
+				fprintf(stderr, "Latitude must fall between -90 and 90 degrees, inclusive\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if ((image_corners.sw_long < -180.0) || (image_corners.sw_long > 180.0) ||
+			    (image_corners.ne_long < -180.0) || (image_corners.ne_long > 180.0))  {
+				fprintf(stderr, "Longitude must fall between -180 and 180 degrees, inclusive\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			if (image_corners.sw_lat > image_corners.ne_lat)  {
+				f = image_corners.sw_lat;
+				image_corners.sw_lat = image_corners.ne_lat;
+				image_corners.ne_lat = f;
+			}
+			if (image_corners.sw_long > image_corners.ne_long)  {
+				f = image_corners.sw_long;
+				image_corners.sw_long = image_corners.ne_long;
+				image_corners.ne_long = f;
+			}
+			(void)redfearn(&datum, &image_corners.sw_x_gp, &image_corners.sw_y_gp, &image_corners.sw_zone,
+					image_corners.sw_lat, image_corners.sw_long, 1);
+			(void)redfearn(&datum, &image_corners.ne_x_gp, &image_corners.ne_y_gp, &image_corners.ne_zone,
+					image_corners.ne_lat, image_corners.ne_long, 0);
+			lat_flag = 1;
+			break;
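+		/*
+		 * Illustrative note (hypothetical coordinates):  the four -l values
+		 * are decimal degrees in the order latitude1,longitude1,latitude2,longitude2,
+		 * so "-l 45.5,-112.5,46.0,-112.0" bounds a half-degree-by-half-degree
+		 * area; the swapping above means the two corners may be given in
+		 * either order.
+		 */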
+		case 'n':
+			if (optarg == (char *)0)  {
+				fprintf(stderr, "No color table number specified with -n\n");
+				usage(argv[0]);
+				exit(0);
+			}
+			color_table_number = atoi(optarg);
+			if ((color_table_number < 1) || (color_table_number > NUM_COLOR_TABS))  {
+				fprintf(stderr, "Invalid color table number specified with -n, valid range is [1-%d]\n", NUM_COLOR_TABS);
+				usage(argv[0]);
+				exit(0);
+			}
+			break;
+		case 'L':
+			license();
+			exit(0);
+			break;
+		case 'w':
+			seacoast_flag = 1;
+			break;
+		case 'i':
+			info_flag = 1;
+			break;
+		case 'h':
+			height_field_flag = 1;
+			break;
+		case 'z':
+			z_flag = 1;
+			break;
+		case 't':
+			tick_flag = 0;
+			break;
+		default:
+			usage(argv[0]);
+			exit(0);
+			break;
+		}
+	}
+	num_dlg = argc - optind;
+
+	/*
+	 * If info_flag is non-zero, then don't bother checking the other options.
+	 * They will be ignored, except for -d.
+	 */
+	if (info_flag == 0)  {
+		/* Clean up the options. */
+		if (output_file == (char *)0)  {
+			if (height_field_flag != 0)  {
+				output_file = "drawmap.pgm";
+			}
+			else  {
+				output_file = "drawmap.sun";
+			}
+		}
+		if ((image_corners.x < 0) && (num_dem != 1))  {
+			/*
+			 * The user didn't specify an x value.  Provide one that is half
+			 * of full resolution for a 1-degree DEM.
+			 *
+			 * If there is only one DEM file, the x and y values will be selected later, based on its contents.
+			 */
+			if (lat_flag != 0)  {
+				image_corners.x = round(0.5 * (image_corners.ne_long - image_corners.sw_long) * (double)(ONE_DEGREE_DEM_SIZE - 1));
+			}
+			else  {
+				image_corners.x = (ONE_DEGREE_DEM_SIZE - 1) >> 1;
+			}
+			fprintf(stderr, "x-width of actual map area set to %d pixels.  (%d elevation samples)\n",
+					image_corners.x, image_corners.x + 1);
+		}
+		if ((image_corners.x > 0) && (image_corners.x & 1))  {
+			/*
+			 * Odd dimensions are potential problems.  Make them even.
+			 * Absorb the odd-ness in the border.
+			 */
+			right_border++;
+		}
+		if ((image_corners.y < 0) && (num_dem != 1))  {
+			/*
+			 * The user didn't specify a y value.  Provide one that is half
+			 * of full resolution for a 1-degree DEM.
+			 *
+			 * If there is only one DEM file, the x and y values will be selected later, based on its contents.
+			 */
+			if (lat_flag != 0)  {
+				image_corners.y = round(0.5 * (image_corners.ne_lat - image_corners.sw_lat) * (double)(ONE_DEGREE_DEM_SIZE - 1));
+			}
+			else  {
+				image_corners.y = (ONE_DEGREE_DEM_SIZE - 1) >> 1;
+			}
+			fprintf(stderr, "y-height of actual map area set to %d pixels.  (%d elevation samples)\n",
+					image_corners.y, image_corners.y + 1);
+		}
+		if ((image_corners.y > 0) && (image_corners.y & 1))  {
+			/*
+			 * Odd dimensions are potential problems (although not generally in the vertical direction).
+			 * Absorb the odd-ness in the border.
+			 */
+			bottom_border++;
+		}
+		if (((image_corners.x > 0) && (image_corners.x < 4)) || ((image_corners.y > 0) && (image_corners.y < 4)))  {
+			/*
+			 * Avoid nonsensically small x or y.  The reason for this is that
+			 * the code was written under the assumption that the image is at
+			 * least of a certain minimal size.  By checking the size once,
+			 * at the top, we don't have to check it throughout the body of the code.
+			 */
+			fprintf(stderr, "x and or y dimension too small.\n");
+			exit(0);
+		}
+		if ((num_dem != 1) && (lat_flag == 0))  {
+			fprintf(stderr, "The -l option is required unless there is exactly one -d option given.\n");
+			usage(argv[0]);
+			exit(0);
+		}
+		if (contour_intvl <= 0.0)  {
+			fprintf(stderr, "The -c option includes a non-positive contour value (%f).\n", contour_intvl);
+			usage(argv[0]);
+			exit(0);
+		}
+		if (relief_factor < 0.0)  {
+			relief_factor = 1.0;
+		}
+	}
+
+
+	/*
+	 * Set up the rasterfile color map.  See colors.h for a description of the map.
+	 *
+	 * Begin by setting up the initial colors in each color band.
+	 */
+	if (color_table_number == 1)  {
+		color_tab = color_tab_neutral;
+		c_index_sea = C_INDEX_SEA_NEUTRAL;
+	}
+	else if (color_table_number == 2)  {
+		color_tab = color_tab_natural;
+		c_index_sea = C_INDEX_SEA_NATURAL;
+	}
+	else if (color_table_number == 3)  {
+		color_tab = color_tab_textbook;
+		c_index_sea = C_INDEX_SEA_TEXTBOOK;
+	}
+	else if (color_table_number == 4)  {
+		color_tab = color_tab_spiral;
+		c_index_sea = C_INDEX_SEA_SPIRAL;
+	}
+// If you want to define your own color table, add it to colors.h,
+// increase NUM_COLOR_TABS (in colors.h) to 5,
+// and uncomment the following four lines.
+//	else if (color_table_number == 5)  {
+//		color_tab = color_tab_my_table;
+//		c_index_sea = C_INDEX_SEA_MY_TABLE;
+//	}
+
+	for (i = 0; i < MAX_VALID_BANDS; i++)  {
+		map[0][color_tab[i].c_index] = color_tab[i].red;
+		map[1][color_tab[i].c_index] = color_tab[i].green;
+		map[2][color_tab[i].c_index] = color_tab[i].blue;
+	}
+	/* Put black into the unused part of the table. */
+	if (MAX_VALID_BANDS == 14)  {
+		map[0][color_tab[MAX_VALID_BANDS].c_index] = 0;
+		map[1][color_tab[MAX_VALID_BANDS].c_index] = 0;
+		map[2][color_tab[MAX_VALID_BANDS].c_index] = 0;
+	}
+	/* Initialize the special color block to black.  We will put in the individual colors later. */
+	map[0][color_tab[15].c_index] = 0;
+	map[1][color_tab[15].c_index] = 0;
+	map[2][color_tab[15].c_index] = 0;
+
+	/*
+	 * We have the most intense color values inserted into the table.
+	 * Now insert progressively less intense versions of each color.
+	 * Each color decreases in intensity all the way to black.
+	 */
+	for (i = 0; i < 16; i++)  {
+		red = relief_factor * (double)map[0][color_tab[i].c_index] / 15.0;
+		green = relief_factor * (double)map[1][color_tab[i].c_index] / 15.0;
+		blue = relief_factor * (double)map[2][color_tab[i].c_index] / 15.0;
+
+		for (j = 1; j <= 15; j++)  {
+			map[0][color_tab[i].c_index + j] = map[0][color_tab[i].c_index] - (unsigned char)round(((double)j * red));
+			map[1][color_tab[i].c_index + j] = map[1][color_tab[i].c_index] - (unsigned char)round(((double)j * green));
+			map[2][color_tab[i].c_index + j] = map[2][color_tab[i].c_index] - (unsigned char)round(((double)j * blue));
+		}
+		if (relief_factor == 1.0)  {
+			/*
+			 * Make sure that we shade all the way exactly to black when
+			 * the relief factor is at its default value of 1.0.
+			 */
+			map[0][color_tab[i].c_index + 15] = 0;
+			map[1][color_tab[i].c_index + 15] = 0;
+			map[2][color_tab[i].c_index + 15] = 0;
+		}
+	}
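+	/*
+	 * Worked example of the shading above (the numbers are illustrative):
+	 * with relief_factor = 1.0 and a base red component of, say, 240,
+	 * shade j of that band gets 240 - round(j * 240 / 15), so j = 0 keeps
+	 * full intensity and j = 15 reaches 0 (forced to exact black by the
+	 * special case above).
+	 */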
+
+	/* Insert miscellaneous colors for drawing roads, streams, and such. */
+	for (i = 0; i < 16; i++)  {
+		map[0][brights[i].c_index] = brights[i].red;
+		map[1][brights[i].c_index] = brights[i].green;
+		map[2][brights[i].c_index] = brights[i].blue;
+	}
+
+
+	/* If an attribute file was specified, then parse it now. */
+	if ((info_flag == 0) && (attribute_file != (char *)0))  {
+		process_attrib(attribute_file);
+	}
+
+
+	/*
+	 * Before we begin processing map data, here is a short lecture on the
+	 * Universal Transverse Mercator (UTM) coordinate system, which is commonly
+	 * used in topographical maps, and by the military (it has been adopted by
+	 * NATO and is used by the US military for ground operations).  UTM coordinates
+	 * take the place of latitude and longitude, which can be cumbersome to deal
+	 * with in the context of a small-area map.
+	 *
+	 * (UTM coordinates are used in the optional-format DLG files, and in the
+	 * 24K DEM files, and there is some reference to them in the 250K DEM files.
+	 * Old-style GNIS files use latitude and longitude, in DDDMMSS format,
+	 * while new ones have both DDDMMSS and decimal degrees.)
+	 *
+	 * The UTM system begins by dividing the earth into 60 zones, each of
+	 * which represents a slice (like a colored panel in a beach ball) that
+	 * spans 6 degrees of longitude.  Zone 1 runs from 180 degrees West
+	 * Longitude to 174 degrees West Longitude.  Zone 2 runs from 174W to
+	 * 168W.  Zone 60 runs from 174E to 180E.
+	 *
+	 * UTM is only used from 84N to 80S.  At the poles, the Universal Polar
+	 * Stereographic (UPS) projection is used.
+	 *
+	 * In each zone, points are represented by rectangular (x,y) coordinates
+	 * that give distances, in meters, from the zone reference point.  This
+	 * reference point is at the intersection of the Equator and the Central
+	 * Meridian (the longitude line that runs down the center of the zone).
+	 * The (x,y) coordinates give the distance in meters to the east and north
+	 * of the reference point.
+	 *
+	 * In order to avoid having negative values for the UTM coordinates,
+	 * some adjustments are made.  In the northern hemisphere, the y
+	 * coordinate is simply measured from zero at the equator, but the
+	 * Central Meridian is assigned a value of 500,000 meters (called a
+	 * false easting), meaning that the distance (to the east) of a
+	 * given point in the zone is the UTM x coordinate minus 500,000.
+	 * In the southern hemisphere, the Central Meridian is again assigned
+	 * a false easting of 500,000 meters; but the equator is no longer
+	 * assigned a value of 0, but rather is assigned a value of 10,000,000
+	 * meters north (called a false northing).
+	 *
+	 * Note that a Mercator projection can be visualized by imagining
+	 * a cylinder, sitting on one of its ends, with a globe inside.
+	 * If a light is shined from, say, the center of the globe, the longitude
+	 * lines will be projected onto the cylinder as vertical lines, and the
+	 * latitude lines will be projected as circles around the cylinder.
+	 * The longitude lines will be evenly spaced, but the latitude lines
+	 * will be farther apart as the latitude increases.  One advantage
+	 * of this projection is that it is conformal, meaning that angles and
+	 * shapes are preserved during the transformation, for any given small
+	 * region of the map.
+	 *
+	 * The Transverse Mercator projection is the same deal, except that the
+	 * cylinder is tipped on its side and the desired Central Meridian is
+	 * aligned so that it is vertical and tangent to the cylinder wall.
+	 * Because of this orientation, shapes and areas near the Central
+	 * Meridian are preserved, while shapes and areas distant from it
+	 * are less accurate, especially when the top and/or bottom of the map
+	 * is close to one of the poles so that the zone slice must be considerably
+	 * stretched to form a rectangular grid.  Within a given UTM zone, however,
+	 * the distortion is relatively small.
+	 *
+	 * UTM is a Transverse Mercator projection, standardized for international
+	 * use.
+	 */
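+
+	/*
+	 * A small illustrative sketch of the false easting/northing described
+	 * above (not used by the program; the values are hypothetical):
+	 *
+	 *	double utm_x = 503000.0;			// a UTM easting
+	 *	double east_of_cm = utm_x - 500000.0;		// 3000 m east of the Central Meridian
+	 *	double utm_y_south = 9997000.0;			// a southern-hemisphere northing
+	 *	double south_of_eq = 10000000.0 - utm_y_south;	// 3000 m south of the equator
+	 */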
+
+
+	/*
+	 * This large loop processes elevation data in DEM format, SDTS DEM format,
+	 * and GTOPO30 DEM format.  By the time the loop ends, the data from all
+	 * files has all been consolidated into a single internal array, image_in.
+	 *
+	 * Ordinary DEM files have a lot of header information, much of which we
+	 * throw away.  Initially, we simply read in the header and use it to figure
+	 * out which type of DEM file we have, normally either a 1-degree DEM or a
+	 * 7.5-minute DEM.
+	 *
+	 * In 1-degree DEMs, at least for the contiguous 48 United States, the
+	 * elevations are stored as samples separated one from another by 3 arc
+	 * seconds, making it easy to store the data in a latitude/longitude grid.
+	 * In 7.5-minute DEMs, the data samples are separated by 10 meters or 30
+	 * meters, and locations are in terms of UTM coordinates.  These files are
+	 * considerably more difficult to translate onto a latitude/longitude grid.
+	 *
+	 * SDTS files contain the same types of data as DEM files, just in a
+	 * radically different format, spanning multiple files.
+	 *
+	 * GTOPO30 files have samples spaced 30 arc-seconds apart.
+	 * They have yet another special format, so we provide a separate routine
+	 * to convert them into data that looks like it came from a DEM file.
+	 */
+	dem_name[0] = '\0';
+	file_index = 0;
+	smooth_image_flag = 0;
+	if ((info_flag == 0) && (image_corners.x > 0) && (image_corners.y > 0))  {
+		get_short_array(&image_in, image_corners.x, image_corners.y);
+	}
+	while (file_index < num_dem)  {
+		length = strlen(dem_files[file_index]);
+
+		/*
+		 * We begin by figuring out if the file is gzip-compressed or not, and then we open it.
+		 */
+		if ((length > 3) && ((strcmp(&dem_files[file_index][length - 3], ".gz") == 0) ||
+		    (strcmp(&dem_files[file_index][length - 3], ".GZ") == 0)))  {
+			gz_flag = 1;
+			if ((dem_fdesc = buf_open_z(dem_files[file_index], O_RDONLY)) < 0)  {
+				fprintf(stderr, "Can't open %s for reading, errno = %d\n", dem_files[file_index], errno);
+				exit(0);
+			}
+			read_function = buf_read_z;
+		}
+		else  {
+			gz_flag = 0;
+			if ((dem_fdesc = buf_open(dem_files[file_index], O_RDONLY)) < 0)  {
+				fprintf(stderr, "Can't open %s for reading, errno = %d\n", dem_files[file_index], errno);
+				exit(0);
+			}
+			read_function = buf_read;
+		}
+
+		if (info_flag == 0)  {
+			fprintf(stderr, "Processing DEM file:  %s\n", dem_files[file_index]);
+		}
+
+		file_index++;
+
+		sdts_flag = 0;
+		gtopo30_flag = 0;
+		/*
+		 * Files in Spatial Data Transfer System (SDTS) format are markedly
+		 * different from the old DEM files.  (As a side note, there does not
+		 * appear to be a specific name for the DEM format.  Most documents
+		 * just call it DEM format, and use "SDTS DEM", or some equivalent
+		 * when they refer to SDTS formatted files.  I usually just call it
+		 * the ordinary DEM format.)
+		 *
+		 * Since SDTS files are so different, we detect them and then do
+		 * all of the initial parsing in a separate function.
+		 *
+		 * We insist that the user specify one, single, SDTS file (with the
+		 * -d option on the command line) for each SDTS DEM layer.
+		 * The file must be the one whose name has the form ????CEL?.DDF
+		 * (or ????cel?.ddf), and it may have a .gz on the end if it is gzip
+		 * compressed.
+		 *
+		 * We allow the files to be gzip-compressed, and they can have either
+		 * ".gz" or ".GZ" on the end.  However, we insist that the rest of
+		 * the file name have consistent case.  That is, if the 'F' or 'f'
+		 * in the ".DDF" or ".ddf" is in a given case, the rest of the file
+		 * had better be in that same case.
+		 *
+		 * If the following "if" test succeeds, we assume we have an SDTS file.
+		 */
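+		/* For example, names like 1090CEL0.DDF, or 1090cel0.ddf.gz when compressed, fit this pattern (hypothetical names). */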
+		if (((length >= 15) && (gz_flag != 0) &&
+		     ((strncmp(&dem_files[file_index - 1][length - 7], ".ddf", 4) == 0) ||
+		      (strncmp(&dem_files[file_index - 1][length - 7], ".DDF", 4) == 0))) ||
+		    ((length >= 12) && (gz_flag == 0) &&
+		     ((strcmp(&dem_files[file_index - 1][length - 4], ".ddf") == 0) ||
+		      (strcmp(&dem_files[file_index - 1][length - 4], ".DDF") == 0))))  {
+			/* SDTS file */
+
+			/* Close the file.  We will reopen it in parse_dem_sdts(). */
+			if (gz_flag == 0)  {
+				buf_close(dem_fdesc);
+			}
+			else  {
+				buf_close_z(dem_fdesc);
+			}
+
+			/*
+			 * Check that the file name takes the form that we expect.
+			 */
+			if (((gz_flag != 0) &&
+			     ((strncmp(&dem_files[file_index - 1][length - 11], "ce", 2) != 0) &&
+			      (strncmp(&dem_files[file_index - 1][length - 11], "CE", 2) != 0))) ||
+			    ((gz_flag == 0) &&
+			     (strncmp(&dem_files[file_index - 1][length - 8], "ce", 2) != 0) &&
+			     (strncmp(&dem_files[file_index - 1][length - 8], "CE", 2) != 0)))  {
+				fprintf(stderr, "The file %s looks like an SDTS file, but the name doesn't look right.  Ignoring file.\n", dem_files[file_index - 1]);
+				continue;
+			}
+
+			/*
+			 * The file name looks okay.  Let's launch into the information parsing.
+			 */
+			if (parse_dem_sdts(dem_files[file_index - 1], &dem_a, &dem_c, &dem_datum, gz_flag) != 0)  {
+				continue;
+			}
+
+			sdts_flag = 1;
+		}
+		/*
+		 * Files in GTOPO30 format are in their own format.  It is similar
+		 * to SDTS format in that the data is spread through a number of
+		 * files.  (However, any similarities end there.)  We only need to
+		 * look at two files, the file whose name ends in ".HDR" and the
+		 * file whose name ends in ".DEM".
+		 *
+		 * We insist that the user specify one, single, GTOPO30 file (with the
+		 * -d option on the command line) for each GTOPO30 file collection.
+		 * The file must be the one whose name has the form *.HDR
+		 * (or *.hdr), and it may have a .gz on the end if it is gzip
+		 * compressed.
+		 *
+		 * We allow the files to be gzip-compressed, and they can have either
+		 * ".gz" or ".GZ" on the end.  However, we insist that the rest of
+		 * the file name have consistent case.  That is, if the 'R' or 'r'
+		 * in the ".HDR" or ".hdr" is in a given case, the rest of the file
+		 * had better be in that same case.
+		 *
+		 * If the following "if" test succeeds, we assume we have an GTOPO30 file.
+		 */
+		else if (((length > 7) && (gz_flag != 0) &&
+		     ((strncmp(&dem_files[file_index - 1][length - 7], ".hdr", 4) == 0) ||
+		      (strncmp(&dem_files[file_index - 1][length - 7], ".HDR", 4) == 0))) ||
+		    ((length > 4) && (gz_flag == 0) &&
+		     ((strcmp(&dem_files[file_index - 1][length - 4], ".hdr") == 0) ||
+		      (strcmp(&dem_files[file_index - 1][length - 4], ".HDR") == 0))))  {
+			/* GTOPO30 file */
+
+			/* Close the file.  We will reopen it in parse_gtopo30(). */
+			if (gz_flag == 0)  {
+				buf_close(dem_fdesc);
+			}
+			else  {
+				buf_close_z(dem_fdesc);
+			}
+
+			gtopo30_flag = 1;
+		}
+		else  {
+			/* Not an SDTS file or GTOPO30 file */
+
+			/*
+			 * Some people (in apparent violation of the DEM standards documents) put
+			 * a newline immediately after the last valid data item in a record
+			 * (rather than padding with blanks to make the record 1024 bytes long).
+			 * This may simply be due to blocking the files with the:
+			 *     dd if=inputfilename of=outputfilename ibs=4096 cbs=1024 conv=unblock
+			 * command, and then forgetting to convert them back.
+			 *
+			 * We read the first record (the Type A header record) a byte at a time,
+			 * searching for a newline, trying to determine if this is one of those files.
+			 *
+			 * We attempt to handle such files, but we don't try very hard.  There are
+			 * many ways to add newlines to the files, and some pathological patterns
+			 * will probably cause drawmap to give up and exit.  I didn't deem it worth
+			 * a lot of effort to try to support every possible non-standard file.
+			 */
+			for (i = 0; i < DEM_RECORD_LENGTH; i++)  {
+				if ((ret_val = read_function(dem_fdesc, &buf[i], 1)) != 1)  {
+					fprintf(stderr, "read from DEM file returns %d, expected 1\n", ret_val);
+					exit(0);
+				}
+				if ((buf[i] == '\n') || (buf[i] == '\r'))  {
+					if (read_function == buf_read)  {
+						read_function = get_a_line;
+					}
+					else  {
+						read_function = get_a_line_z;
+					}
+					break;
+				}
+			}
+			/* Set ret_val as if we had done one big read. */
+		        ret_val = i;
+
+
+			/*
+			 * Parse all of the data from the header that we care about.
+			 * Rather than make parse_dem_a() handle variable length
+			 * header records, pad the record out to 1024.
+			 */
+			for (i = ret_val; i < DEM_RECORD_LENGTH; i++)  {
+				buf[i] = ' ';
+			}
+			parse_dem_a(buf, &dem_a, &dem_datum);
+		}
+
+
+		/*
+		 * Depending on the type of data, call the appropriate
+		 * routine to allocate space for the data and read it in.
+		 * Note that we must later free the space pointed to by dem_corners.ptr.
+		 */
+		dem_corners.ptr = (short *)0;
+		if (sdts_flag != 0)  {
+			ret_val = process_dem_sdts(dem_files[file_index - 1], &image_corners, &dem_corners, &dem_a, &dem_datum);
+		}
+		else if (gtopo30_flag != 0)  {
+			ret_val = process_gtopo30(dem_files[file_index - 1], &image_corners, &dem_corners, &dem_a, &dem_datum, info_flag);
+		}
+		else if (dem_a.plane_ref == 0)  {		// Check for Geographic Planimetric Reference System
+			/*
+			 * Note that this function has a side effect:  it converts the
+			 * latitude/longitude code in dem_a.title into all spaces.
+			 * This is done so that the code won't be included as part of
+			 * the DEM name when we capture the DEM name a few lines hence.
+			 * The routine has the additional side effect of setting
+			 * dem_a->zone to a valid value.  The zone field in the DEM file
+			 * header is zero for Geographic DEMs.
+			 *
+			 * Files with this Planimetric Reference System code are:  30-minute, 1-degree, and Alaska DEMs.
+			 * I have no samples of 30-minute files, so I don't know if process_geo_dem will work with
+			 * them.  It should work for 1-degree and Alaska DEMs.
+			 */
+			ret_val = process_geo_dem(dem_fdesc, read_function, &image_corners, &dem_corners, &dem_a, &dem_datum);
+		}
+		else if (dem_a.plane_ref == 1)  {		// Check for UTM Planimetric Reference System
+			/*
+			 * Files with this Planimetric Reference System code are:  7.5-minute DEMs.
+			 */
+			ret_val = process_utm_dem(dem_fdesc, read_function, &image_corners, &dem_corners, &dem_a, &dem_datum);
+
+			/*
+			 * We must choose whether to keep these data in UTM coordinates or
+			 * inverse project them onto a latitude/longitude grid.
+			 *
+			 * We choose here to inverse project onto a latitude/longitude grid.
+			 * This will be done below.
+			 */
+		}
+		else  {
+			fprintf(stderr, "Unsupported Planimetric Reference System (code = %d) in DEM file.  File ignored.\n", dem_a.plane_ref);
+			ret_val = 1;	// Simulate error return from processing function.
+		}
+		if ((sdts_flag == 0) && (gtopo30_flag == 0))  {
+			if (gz_flag == 0)  {
+				buf_close(dem_fdesc);
+			}
+			else  {
+				buf_close_z(dem_fdesc);
+			}
+		}
+
+
+		/*
+		 * Print all of the parsed header data.
+		 */
+//		print_dem_a(&dem_a);
+
+
+		if (info_flag != 0)  {
+			/*
+			 * We only need to print out some information about the DEM file.
+			 * We aren't going to produce an image.
+			 */
+			if (ret_val != 0)  {
+				dem_corners.y = -1;		// If parsing failed, we may not know the y dimension
+			}
+			else  {
+				free(dem_corners.ptr);
+			}
+
+			fprintf(stdout, "%s\t%40.40s\t%g:%g:%g:%g\t%d:%d\t%d:%d\t%s\n",
+					dem_files[file_index - 1], dem_a.title,
+					dem_corners.se_lat, dem_corners.se_long, dem_corners.nw_lat, dem_corners.nw_long,
+					dem_a.min_elev, dem_a.max_elev, dem_a.cols, dem_corners.y,
+					(read_function == get_a_line || read_function == get_a_line_z) ? "linefeeds=yes" : "linefeeds=no");
+			continue;
+		}
+		if (ret_val == 0)  {
+			dem_flag = 1;
+		}
+		else  {
+			continue;
+		}
+
+		/*
+		 * If the user didn't specify an image size, and there is only one DEM,
+		 * initialize the image size from the DEM size.
+		 */
+		if (num_dem == 1)  {
+			/* There was only one DEM file. */
+			if (image_corners.x < 0)  {
+				/*
+				 * The user didn't specify an x value.  Select it to display
+				 * the single DEM file at full resolution.
+				 */
+				image_corners.x = dem_corners.x - 1;
+
+				fprintf(stderr, "x-width of actual map area set to %d pixels.  (%d elevation samples)\n",
+						image_corners.x, image_corners.x + 1);
+
+				if (image_corners.x & 1)  {
+					/*
+					 * Odd dimensions are potential problems.  Make them even
+					 * by absorbing the odd-ness in the border.
+					 */
+					right_border++;
+				}
+			}
+			if (image_corners.y < 0)  {
+				/*
+				 * The user didn't specify a y value.  Select it to display
+				 * the single DEM file at full resolution.
+				 */
+				image_corners.y = dem_corners.y - 1;
+
+				fprintf(stderr, "y-width of actual map area set to %d pixels.  (%d elevation samples)\n",
+						image_corners.y, image_corners.y + 1);
+
+				if (image_corners.y & 1)  {
+					/*
+					 * Odd dimensions are potential problems.  Make them even
+					 * by absorbing the odd-ness in the border.
+					 */
+					bottom_border++;
+				}
+			}
+		}
+		/*
+		 * If user did not provide the -l option, then initialize image boundary specifications.
+		 * Note that, in this case, we know there is only a single DEM file, because we
+		 * explicitly checked for this when we checked the input arguments.
+		 * Thus it is safe to simply initialize the image corners from the dem corners.
+		 */
+		if (lat_flag == 0)  {
+			image_corners.sw_y_gp = dem_corners.sw_y_gp;
+			image_corners.sw_lat = dem_corners.sw_lat;
+			image_corners.sw_x_gp = dem_corners.sw_x_gp;
+			image_corners.sw_long = dem_corners.sw_long;
+			image_corners.sw_zone = dem_a.zone;
+
+			image_corners.ne_y_gp = dem_corners.ne_y_gp;
+			image_corners.ne_lat = dem_corners.ne_lat;
+			image_corners.ne_x_gp = dem_corners.ne_x_gp;
+			image_corners.ne_long = dem_corners.ne_long;
+			image_corners.ne_zone = dem_a.zone;
+
+			lat_flag = 1;
+		}
+
+
+		/*
+		 * We at last are sure that we have enough information to allocate space
+		 * for the big DEM data array.  Allocate it now, so that it will
+		 * be ready for use.
+		 */
+		if (image_in == (short *)0)  {
+			get_short_array(&image_in, image_corners.x, image_corners.y);
+		}
+
+
+		/*
+		 * Save the name of the DEM block for later use.
+		 *
+		 * This is more difficult than it might at first appear.
+		 * People put all kinds of free-form text into the beginning
+		 * of a DEM header record.  Only some of it can really be called a
+		 * name.  (For example, there may be latitude/longitude information,
+		 * or various codes describing aspects of the DEM file that people think
+		 * should be remembered but don't have a legitimate place for in the
+		 * standard record structure.)
+		 * In an attempt to get just the name, we assume that it comes first in the record
+		 * (which is not always true), and take everything up until 40 characters
+		 * or until we come across three blanks in a row.
+		 */
+		if (dem_name[0] == '\0')  {
+			i = 0;
+
+			for (j = 0; j < 40; j++)  {
+				if (dem_a.title[j] != ' ')  {
+					/* If the character is not a space, just copy it. */
+					dem_name[i++] = dem_a.title[j];
+				}
+				else  {
+					/* Allow a maximum of two spaces in a row */
+					if ((dem_a.title[j    ] == ' ') &&
+					    (dem_a.title[j + 1] == ' ') &&
+					    (dem_a.title[j + 2] == ' '))  {
+						break;
+					}
+					else  {
+						dem_name[i++] = dem_a.title[j];
+					}
+				}
+			}
+
+			dem_name[i] = '\0';
+		}
+		else  {
+			strcpy(dem_name, "Data from multiple DEM files");
+		}
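+
+		/*
+		 * Illustrative example (the title text is hypothetical):  a header
+		 * title such as "BOZEMAN, MT   1:250000 ..." yields the name
+		 * "BOZEMAN, MT", because the copy stops at the first run of three
+		 * blanks (or after 40 characters).
+		 */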
+
+
+		/*
+		 * Figure out the area of the image that will be covered by this set of DEM file data.
+		 * Fill in that area with data from corners.ptr.
+		 *
+		 * Because the relative sizes can take any ratio (in either the x or y direction)
+		 * we simply choose the point from corners.ptr that lies closest to the relative
+		 * location in the covered area.  The exception to this is when the image is
+		 * being subsampled, in which case we smooth the data to get average representative data points.
+		 * (If the data is being oversampled, we will smooth it later to get rid of the
+		 * checkerboard effect that occurs when whole blocks of the image are at the same
+		 * elevation.)
+		 */
+		latitude1 = max3(-91.0, dem_corners.sw_lat, image_corners.sw_lat);
+		longitude1 = max3(-181.0, dem_corners.sw_long, image_corners.sw_long);
+		latitude2 = min3(91.0, dem_corners.ne_lat, image_corners.ne_lat);
+		longitude2 = min3(181.0, dem_corners.ne_long, image_corners.ne_long);
+		tmp_width = round((double)(dem_corners.x - 1) * (longitude2 - longitude1) /
+				  (dem_corners.ne_long - dem_corners.sw_long));
+		tmp_height = round((double)(dem_corners.y - 1) * (latitude2 - latitude1) /
+				   (dem_corners.ne_lat - dem_corners.sw_lat));
+		tmp_x = round((double)(dem_corners.x - 1) * (longitude1 - dem_corners.sw_long) /
+			      (dem_corners.ne_long - dem_corners.sw_long));
+		tmp_y = (dem_corners.y - 1) - round((double)(dem_corners.y - 1) * (latitude2 - dem_corners.sw_lat) /
+						    (dem_corners.ne_lat - dem_corners.sw_lat));
+
+		x_low = round((double)image_corners.x * (longitude1 - image_corners.sw_long) /
+			      (image_corners.ne_long - image_corners.sw_long));
+		x_high = round((double)(image_corners.x + 1) * (longitude2 - image_corners.sw_long) /
+			       (image_corners.ne_long - image_corners.sw_long));
+		y_low = image_corners.y - round((double)image_corners.y * (latitude2 - image_corners.sw_lat) /
+						(image_corners.ne_lat - image_corners.sw_lat));
+		y_high = image_corners.y + 1 - round((double)image_corners.y * (latitude1 - image_corners.sw_lat) /
+						     (image_corners.ne_lat - image_corners.sw_lat));
+
+		if ((x_low < 0) || (x_high > (image_corners.x + 1)) || (y_low < 0) || (y_high > (image_corners.y + 1)))  {
+			fprintf(stderr, "One of x_low=%d, x_high=%d, y_low=%d, y_high=%d out of range\n",
+				x_low, x_high, y_low, y_high);
+			exit(0);
+		}
+
+// For debugging.
+//		fprintf(stderr, "image_corners.x=%d  image_corners.y=%d  dem_corners.x=%d  dem_corners.y=%d\n     x_low=%d  x_high=%d  y_low=%d  y_high=%d\n",
+//			image_corners.x, image_corners.y, dem_corners.x, dem_corners.y, x_low, x_high, y_low, y_high);
+//		fprintf(stderr, "dem_corners: (%g %g) (%g %g) (%d %d)\n     image_corners: (%g %g) (%g %g) (%d %d)\n     tmp_width=%d   tmp_height=%d   tmp_x=%d   tmp_y=%d\n",
+//			dem_corners.sw_x_gp, dem_corners.sw_y_gp, dem_corners.ne_x_gp, dem_corners.ne_y_gp, dem_corners.x, dem_corners.y,
+//			image_corners.sw_x_gp, image_corners.sw_y_gp, image_corners.ne_x_gp, image_corners.ne_y_gp, image_corners.x, image_corners.y,
+//			tmp_width, tmp_height, tmp_x, tmp_y);
+
+
+		/*
+		 * Calculate some ratios that we use to determine whether or not
+		 * smoothing is required.
+		 *
+		 * If we have DEM data of greater resolution than the target image,
+		 * then we smooth the DEM data (average data points over small areas)
+		 * so that each target image pixel represents an average of the available
+		 * DEM data points for locations near that pixel.  This throws away
+		 * some of the "crispness" of the data, so we don't want do it willy-nilly.
+		 * (However, if the resolutions are very much different, then the
+		 * terrain can look quite peculiar without smoothing, because elevation
+		 * samples from widely-separated areas can be thrown next to each other
+		 * on the image.)
+		 *
+		 * If we have DEM data of lesser resolution than the target image,
+		 * then we smooth the target image to reduce the stairstep effect
+		 * that comes from spreading too little data over too large an area.
+		 * In this case, the data is a little too "crisp", in the sense that
+		 * we don't have enough of it, so we need to spread the available
+		 * data out to fill the desired image.
+		 *
+		 * If the data and image resolution are nearly the same, we don't do
+		 * any smoothing.  Thus we check to make sure that the two resolutions
+		 * differ by at least a certain amount.  For data smoothing, the amount
+		 * is 50%, because we don't want to smear up the data unless we
+		 * have a good reason.  For image smoothing, we are a lot less
+		 * tolerant, because even a relatively small resolution difference can
+		 * create image stairstepping.
+		 *
+		 * The decision of whether or not to smooth is somewhat subjective,
+		 * so our choice may not always make everyone happy.  However, the user
+		 * can always display the data at full resolution if the smoothing results
+		 * don't meet expectations.
+		 *
+		 * We check the x and y resolutions separately, and do the smoothing
+		 * if either direction meets the criterion.
+		 *
+		 * There is still an image glitch that isn't dealt with here.  When
+		 * the resolutions of the target image and the DEM data are close,
+		 * but not identical (roughly within 30% of each other), then there
+		 * may be a tiny checkerboard pattern on the areas of the image that
+		 * represent low-gradient terrain.  This appears to be caused by the
+		 * process by which indexes into the DEM data are derived from indexes
+		 * into the target image.  Since the indexes are approximately congruent,
+		 * (but not quite) a set of image indexes (in, say, the x direction) like:
+		 * 0 1 2 3 4 5 6 7 ...
+		 * can translate into a set of DEM indexes like:
+		 * 0 1 3 4 6 7 9 10 ...
+		 * This means that the target image contains pairs of adjacent elevations
+		 * that come from adjacent locations in the DEM data.  Adjacent to each
+		 * of these pairs (on the target image) are pairs that came from not-quite-
+		 * adjacent data in the DEM data.  This creates small-scale stairstepping
+		 * in the target image, where each pair of elevations is bounded by pairs
+		 * that have small elevation discontinuities.  The result is anomalous
+		 * bands of light or shadow at the discontinuities.  The problem only
+		 * shows up in areas where the elevation is changing slowly (that is, the gradient
+		 * has a small magnitude) because only in those regions does a small elevation
+		 * change result in a relatively large color change.
+		 * I tried various simple things to eliminate this problem, including
+		 * various filters, and even some simple jittering of the data.  None
+		 * of these techniques improved the image enough to be worthwhile
+		 * (at least in my subjective opinion).  Until I figure out a good way
+		 * to approach this problem, the manual page simply says not to select
+		 * nearly-the-same-but-not-the-same source and target resolutions.
+		 * It seems unlikely that people would want to do this very often anyway.
+		 */
+		smooth_data_flag = 0;
+	    	res_x_data = (double)(dem_corners.x - 1) / (dem_corners.ne_long - dem_corners.sw_long);
+		res_x_image = (double)image_corners.x / (image_corners.ne_long - image_corners.sw_long);
+		res_y_data = (double)(dem_corners.y - 1) / (dem_corners.ne_lat - dem_corners.sw_lat);
+		res_y_image = (double)image_corners.y / (image_corners.ne_lat - image_corners.sw_lat);
+		if (((1.5 * res_y_image) < res_y_data) || ((1.5 * res_x_image) < res_x_data))  {
+			smooth_data_flag = 1;
+		}
+		if (((1.05 * res_y_data) < res_y_image) || ((1.05 * res_x_data) < res_x_image))  {
+			smooth_image_flag = 1;
+		}
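+		/*
+		 * Worked example (the numbers are illustrative):  a 1-degree DEM with
+		 * 1201 samples per degree drawn into an image only 600 pixels wide
+		 * gives res_x_data = 1200 and res_x_image = 600; since 1.5 * 600 < 1200,
+		 * the DEM data gets smoothed.  Conversely, if the image were 2400
+		 * pixels wide, 1.05 * 1200 < 2400 would trigger image smoothing instead.
+		 */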
+
+		/*
+		 * Prepare a smoothing kernel in case we have more data than pixels to display it.
+		 * The kernel is a square, a maximum of 2*SMOOTH_MAX+1 on a side.
+		 *
+		 * Here is one possible kernel, that I have tried:
+		 *    If a kernel element is a distance of sqrt(k*k + l*l) from the
+		 *    center, then its weight is 10*1.5^(-x/2)
+		 *    Implemented by:
+		 *       smooth[k + smooth_size][l + smooth_size] = round(10.0 * pow(1.5, - sqrt(k * k + l * l) / 2.0));
+		 *
+		 * For now, we just take the straight average over the kernel, since it seems to work reasonably
+		 * well.
+		 *
+		 * The kernel width/height will be 1+2*smooth_size pixels.
+		 * In the calculation of smooth_size, we take the minimum of SMOOTH_MAX,
+		 * pixels_per_degree_resolution_of_source_data_in_y_direction / pixels_per_degree_resolution_of_target_image_in_y_direction - 1, and
+		 * pixels_per_degree_resolution_of_source_data_in_x_direction / pixels_per_degree_resolution_of_target_image_in_x_direction - 1
+		 *
+		 * The more excess data we have, the more source pixels we average to get a single
+		 * data point for the target image.
+		 */
+		if (smooth_data_flag != 0)  {
+			smooth_size = round(min3(SMOOTH_MAX,
+						 -1.0 + res_y_data / res_y_image,
+						 -1.0 + res_x_data / res_x_image));
+			if (smooth_size < 1)  {
+				/*
+				 * If the y resolution and x resolution differ,
+				 * it is possible for one to call for smoothing and the other not.
+				 * This would result in smooth_size = 0, which we don't want.
+				 * We correct that problem here.
+				 */
+				smooth_size = 1;
+			}
+			for (k = -smooth_size; k <= smooth_size; k++)  {
+				for (l = -smooth_size; l <= smooth_size; l++)  {
+					smooth[k + smooth_size][l + smooth_size] = 1;
+				}
+			}
+		}
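+		/*
+		 * For instance (illustrative only, and assuming SMOOTH_MAX is at least 2):
+		 * if the DEM data has roughly three times the resolution of the image in
+		 * each direction, smooth_size works out to 2 and the kernel is a 5x5 block
+		 * of ones, so each image pixel becomes the plain average of up to 25
+		 * nearby DEM samples.
+		 */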
+
+
+		/*
+		 * This is the loop that transfers the data for a single DEM into the image_in array.
+		 * The image_in array will eventually hold the data from all DEM files given by the user.
+		 *
+		 * Note:  The mapping of DEM data into the image is done by simple linear interpolation
+		 * from the edges of the DEM data.  This is quite straightforward for DEM data that uses
+		 * geographical planimetric coordinates (latitudes and longitudes).  However for 7.5-minute
+		 * DEM data, which use UTM coordinates, we have to map from UTM into latitude/longitude
+		 * coordinates.  This mapping works as follows:
+		 *
+		 *	Use the (i, j) location in the image to determine an accurate latitude/longitude.
+		 *      Map the latitude/longitude into UTM coordinates with the redfearn() function.
+		 *      Use these UTM coordinates, along with the known UTM range of the DEM data,
+		 *          to accurately determine the correct (k, l) point in the DEM data that
+		 *          corresponds most closely to the specified latitude/longitude within the map image.
+		 *      Use that correct point to produce an elevation value to stuff into the
+		 *          (i, j) location in the image.
+		 *
+		 * Technically speaking, this is about as accurate a job as can be done without implementing
+		 * some between-point interpolation.  I have so far resisted using inter-point interpolation in
+		 * drawmap, mostly because it changes the data in ways that are non-obvious to the user.  (Call
+		 * it a personal preference.)  However, 7.5-minute DEMs might benefit from it because they get
+		 * warped and twisted during the conversion to latitude/longitude coordinates.  This sometimes
+		 * results in some diagonal linear artifacts in the map.  Interpolation might (in theory)
+		 * eliminate these.  A potential future feature for drawmap is to provide such interpolation,
+		 * perhaps as a command line option.  Another potential feature is to provide an option to
+		 * plot maps on a UTM grid instead of a latitude/longitude grid.  This would work better
+		 * for 7.5-minute UTM data.
+		 */
+		if ((tmp_width != 0) && (tmp_height != 0))  {
+			for (i = y_low; i < y_high; i++)  {
+				if (dem_a.plane_ref != 1)  {
+					/* Geographic Planimetric coordinates. */
+					k = tmp_y + round((double)(tmp_height * (i - y_low)) / (double)(y_high - 1 - y_low));
+				}
+
+				for (j = x_low; j < x_high; j++)  {
+					if (dem_a.plane_ref != 1)  {
+						/* Geographic planimetric coordinates. */
+						l = tmp_x + round((double)(tmp_width * (j - x_low)) / (double)(x_high - 1 - x_low));
+						if ((l < 0) || (l > (dem_corners.x - 1)) || (k < 0) || (k > (dem_corners.y - 1)))  {
+							fprintf(stderr, "One of l=%d, k=%d out of range, (i=%d, j=%d, tmp_y=%d, tmp_x=%d, tmp_height=%d, tmp_width=%d)\n",
+								l, k, i, j, tmp_y, tmp_x, tmp_height, tmp_width);
+							exit(0);
+						}
+					}
+					else  {
+						/*
+						 * UTM Planimetric coordinates.
+						 *
+						 * Find UTM equivalents of the latitude/longitude represented by (i, j)
+						 * and round those UTM equivalents to the nearest round 10 or 30
+						 * meter increment.  (Whether the increment is 10 or 30 is determined
+						 * by the value in dem_a.x_res or dem_a.y_res.)
+						 *
+						 * Afterward, use these values to interpolate index values for
+						 * the DEM data array.
+						 */
+						(void)redfearn(&dem_datum, &utm_x, &utm_y, &utm_zone,
+							latitude2  - (double)(i - y_low) * (latitude2  - latitude1)  / (double)(y_high - y_low - 1),
+							longitude1 + (double)(j - x_low) * (longitude2 - longitude1) / (double)(x_high - x_low - 1), 0);
+						utm_x = rint(utm_x / dem_a.x_res) * dem_a.x_res;
+						utm_y = rint(utm_y / dem_a.y_res) * dem_a.y_res;
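+						/*
+						 * E.g. (illustrative numbers):  with a 30-meter grid
+						 * (dem_a.x_res == 30.0), an interpolated utm_x of 500014.0
+						 * becomes rint(500014 / 30) * 30 = 500010.0, the nearest
+						 * round 30-meter column in the DEM data.
+						 */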
+
+						k = dem_corners.y - 1 - round((((double)dem_corners.y - 1.0) * (utm_y - dem_corners.y_gp_min)) / (dem_corners.y_gp_max - dem_corners.y_gp_min));
+						l = round((((double)dem_corners.x - 1.0) * (utm_x - dem_corners.x_gp_min)) / (dem_corners.x_gp_max - dem_corners.x_gp_min));
+
+						if ((l < 0) || (l > (dem_corners.x - 1)) || (k < 0) || (k > (dem_corners.y - 1)))  {
+							/*
+							 * The data in a 7.5-minute DEM is localized at round-numbered
+							 * UTM values.  Thus, it rarely falls exactly on the boundaries
+							 * of the latitude/longitude bounding box for a DEM.  Thus,
+							 * as we index back and forth across the latitude/longitude
+							 * bounding box, it is not at all uncommon to get index values
+							 * that slop slightly over the edges of the DEM data array.
+							 * Because of this, we don't print a warning message for those
+							 * slop-overs.  We simply ignore them.
+							 */
+							//fprintf(stderr, "One of l=%d, k=%d out of range, (i=%d, j=%d, tmp_y=%d, tmp_x=%d, tmp_height=%d, tmp_width=%d)\n",
+							//	l, k, i, j, tmp_y, tmp_x, tmp_height, tmp_width);
+							continue;
+						}
+					}
+
+
+					if (*(dem_corners.ptr + k * dem_corners.x + l) == HIGHEST_ELEVATION)  {
+						/*
+						 * It is possible, for 7.5-minute DEMs, to have some samples
+						 * at HIGHEST_ELEVATION around the non-rectangular boundaries
+						 * of the DEM data.  Don't attempt to copy these into the image array.
+						 */
+						continue;
+					}
+
+					if (smooth_data_flag != 0)  {
+						/*
+						 * We have DEM data whose resolution, in pixels per degree,
+						 * is greater than the resolution of the target image.  Since
+						 * we have excess data, do some smoothing of the data so that
+						 * the elevation of a point in the target image is an average
+						 * over a group of points in the source DEM data.
+						 */
+						sum = 0;
+						sum_count = 0;
+						for (m = -smooth_size; m <= smooth_size; m++)  {
+							for (n = -smooth_size; n <= smooth_size; n++)  {
+								if (((k + m) < 0) || ((k + m) >= dem_corners.y) || ((l + n) < 0) || ((l + n) >= dem_corners.x))  {
+									continue;
+								}
+
+								if (*(sptr = dem_corners.ptr + (k + m) * dem_corners.x + l + n) == HIGHEST_ELEVATION)  {
+									continue;
+								}
+								sum += *sptr * smooth[m + smooth_size][n + smooth_size];
+								sum_count += smooth[m + smooth_size][n + smooth_size];
+
+								/*
+								 * Here, we are trying to find the latitude and longitude of the
+								 * high and low elevation points in the map.
+								 * When there is heavy smoothing, the derived location may
+								 * be pretty approximate.
+								 * Note also that there may be more than one point in the
+								 * map that takes on the highest (or lowest) elevation.
+								 * We only select the first one we find.
+								 *
+								 * It is somewhat inefficient to do these checks here,
+								 * since data points will generally get checked multiple
+								 * times; but doing it here lets us easily associate
+								 * a given DEM data point with values of i and j,
+								 * which give us the latitude/longitude of the point.
+								 */
+								if (*sptr < min_elevation)  {
+									min_elevation = *sptr;
+									min_e_lat = i;
+									min_e_long = j;
+								}
+								if (*sptr > max_elevation)  {
+									max_elevation = *sptr;
+									max_e_lat = i;
+									max_e_long = j;
+								}
+							}
+						}
+						*(image_in + i * (image_corners.x + 1) + j) = round((double)sum / (double)sum_count);
+					}
+					else  {
+						/*
+						 * We have an image that is either one-to-one with the DEM data, or that needs
+						 * more pixels per degree of longitude than the DEM data can supply.
+						 *
+						 * Don't do any smoothing.  Simply pick the nearest
+						 * point from dem_corners.ptr.
+						 *
+						 * If the x and y image sizes, given by the user, are
+						 * not related by an integer factor to the number of elevation samples
+						 * in the available data, then the image will contain some
+						 * stripe anomalies because the rounding (above) to arrive
+						 * at the k and l values will periodically give two k or
+						 * l values in a row that have the same value.  Since
+						 * the image color at a given point depends on changes in
+						 * elevation around that point, having repeated elevation
+						 * values can result in anomalous flat areas (with a neutral
+						 * color) in an area of generally steep terrain (with generally
+						 * bright or dark colors).  We can do some smoothing later
+						 * in an attempt to lessen this problem.
+						 */
+						if (*(sptr = dem_corners.ptr + k * dem_corners.x + l) == HIGHEST_ELEVATION)  {
+							continue;
+						}
+						*(image_in + i * (image_corners.x + 1) + j) = *sptr;
+
+						/*
+						 * Here, we are trying to find the latitude and longitude of the
+						 * high and low elevation points in the map.
+						 * Note that there may be more than one point in the
+						 * map that takes on the highest (or lowest) elevation.
+						 * We only select the first one we find.
+						 */
+						if (*sptr < min_elevation)  {
+							min_elevation = *sptr;
+							min_e_lat = i;
+							min_e_long = j;
+						}
+						if (*sptr > max_elevation)  {
+							max_elevation = *sptr;
+							max_e_lat = i;
+							max_e_long = j;
+						}
+					}
+				}
+			}
+		}
+		free(dem_corners.ptr);
+	}
+	/*
+	 * If we have reached this point and we still don't know the image dimensions,
+	 * then just give up and exit.  We could put in a big slug of code here
+	 * and come up with some image dimensions, but we have reached the point of
+	 * diminishing returns.
+	 *
+	 * If we reach this point without image dimensions, it probably means that
+	 * the user has provided a single DEM file, but that it falls outside of
+	 * the specified latitude/longitude range.  We could limp along under these
+	 * conditions, and process the DLG or GNIS information (if any), but it
+	 * doesn't seem worthwhile.  It is usually best to localize decisions, to the
+	 * extent possible.  We have violated that rule here, with the laudable goal
+	 * of trying to not force the user to specify image dimensions and latitude/longitude
+	 * ranges.  However, the image-size decision has been smeared over enough of the
+	 * code to make it hard to understand and maintain.  (There are even little bits
+	 * of it slopped over into dem.c.)  It is time to call a halt.  (Perhaps past time.)
+	 */
+	if ((info_flag == 0) && ((image_corners.x < 0) || (image_corners.y < 0)))  {
+		fprintf(stderr, "Image dimensions are ambiguous.  There may be a problem with -l, -x, and/or -y.\n");
+		fprintf(stderr, "If you provide a single DEM file, you can leave out -l, -x, and -y,\n");
+		fprintf(stderr, "and drawmap will choose them for you.\n");
+		exit(0);
+	}
+
+
+	/*
+	 * When dealing with 7.5-minute DEMs, there are sometimes gaps between the data
+	 * for a pair of adjacent DEMs.  This is sometimes because it is difficult to
+	 * choose image dimensions so that there is an exact correspondence between data
+	 * points and image points --- under these conditions, rounding quantization can
+	 * cause a small gap to occur between quads.
+	 * Occasionally, there are also actual gaps between the data in adjacent files.
+	 * Either of these difficulties can result in white gaps in the image,
+	 * between the data for adjacent quads.
+	 *
+	 * We fill in these voids by averaging neighboring points that contain valid data.
+	 * We look for spots on the image where a non-valid point has valid points, on either
+	 * side, in diametric opposition.
+	 *
+	 * We stretch out further to the left and right because the quads are generally
+	 * fairly even on top and bottom, but ragged on the left and right.  Thus, any
+	 * gaps usually show up at the vertical joints between quads, and the gaps can
+	 * be two pixels wide, when the joints are particularly ragged.
+	 *
+	 * The purpose of this block of code is to get rid of gaps that occur when
+	 * the target image resolution is about the same as the data resolution.
+	 * (We will call this the resolution-parity case.)  When the target image
+	 * resolution is considerably smaller than the data resolution, then the
+	 * data smoothing (performed above) should eliminate any gaps.  When
+	 * the target image resolution is considerably greater than the data
+	 * resolution, then we have to rely on the image smoothing (performed later)
+	 * to fill the gaps, because the gaps can be magnified in width by oversampling.
+	 * The current block of code falls between the two extremes.  Unlike either of
+	 * the smoothing operations, this code does its job without modifying
+	 * any existing elevation data.  It steps lightly, and only modifies
+	 * the empty regions between blocks of valid data.  (Of course, this
+	 * current block of code may also fill in some gaps that would otherwise
+	 * have been filled in by the image smoothing below.)
+	 *
+	 * We could combine this operation with the image smoothing operation, below,
+	 * but the latter operation is currently written to require a complete extra
+	 * copy of the image.  By doing a separate interpolation operation here, we
+	 * avoid having to double our memory needs for images that have approximate
+	 * resolution parity with the data.  This is important, because such images
+	 * are often quite large, sometimes several times as large as can be displayed on
+	 * a single screen.  Since the image smoothing operation is used when
+	 * a small amount of data is blown up into a larger size, the images there
+	 * are likely to be no more than moderate in size, perhaps comparable to the
+	 * size of a display screen.  Thus, in the image-smoothing case, the doubled
+	 * memory requirements are an acceptable trade off for simpler code; while,
+	 * in the resolution-parity case, it is worthwhile to try to minimize memory
+	 * use and thus maximize the size of the allowable maps.
+	 *
+	 *
+	 * We don't want to allocate space for another image, so we allocate space for
+	 * another image row.  We use this temporary space, and the variables s0, s1, and s2,
+	 * to save the data we have already looked at, so that we can change the data
+	 * in the image array itself, but still have a copy of the old data to do our
+	 * searching and averaging with.  This adds a small amount of complexity to
+	 * this block of code, but can greatly decrease our memory needs.
+	 *
+	 * sptr is the pointer to the row we are currently examining and changing.
+	 * sptr_down is a pointer to the next row down the image (the row we will examine next).
+	 * tmp_row holds a pre-change version of the previously examined row.
+	 * s2 holds the pre-change version of the point we are currently looking at.
+	 * s1 holds the pre-change version of the previous point.
+	 * s0 holds the pre-change version of the point before s1.
+	 */
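+	/*
+	 * As a sketch of the pairs examined by the loop below (X marks the
+	 * non-valid point under consideration, letters mark its neighbors in
+	 * the row above, the current row, and the row below):
+	 *
+	 *        b  a  c  d  e
+	 *        f  g  X  h  i
+	 *        k  l  m  n  o
+	 *
+	 * the diametrically-opposed pairs checked are g-h, f-i, c-m, a-n,
+	 * d-l, b-o, and e-k.  Whenever both members of a pair hold valid
+	 * data, they are folded into the average that replaces X.
+	 */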
+	if (info_flag == 0)  {
+		tmp_row = (short *)malloc(sizeof(short) * (image_corners.x + 1));
+		if (tmp_row == (short *)0)  {
+			fprintf(stderr, "malloc of tmp_row failed\n");
+			exit(0);
+		}
+		sptr = image_in - image_corners.x - 1;
+		sptr_down = image_in;
+		for (i = 0; i <= image_corners.y; i++)  {
+			sptr += (image_corners.x + 1);
+			sptr_down += (image_corners.x + 1);
+			for (j = 0; j <= image_corners.x; j++)  {
+				s2 = sptr[j];
+				if (s2 == HIGHEST_ELEVATION)  {
+					f = 0.0;
+					k = 0;
+					if ((j > 0) && (j < image_corners.x))  {
+						if ((sptr[j - 1] != HIGHEST_ELEVATION) && (sptr[j + 1] != HIGHEST_ELEVATION))  {
+							f += sptr[j - 1];
+							f += sptr[j + 1];
+							k = k + 2;
+						}
+						if ((i > 0) && (i < image_corners.y))  {
+							if ((tmp_row[j - 1] != HIGHEST_ELEVATION) && (sptr_down[j + 1] != HIGHEST_ELEVATION))  {
+								f += tmp_row[j - 1];
+								f += sptr_down[j + 1];
+								k = k + 2;
+							}
+							if ((tmp_row[j + 1] != HIGHEST_ELEVATION) && (sptr_down[j - 1] != HIGHEST_ELEVATION))  {
+								f += tmp_row[j + 1];
+								f += sptr_down[j - 1];
+								k = k + 2;
+							}
+							if ((j > 1) && (j < (image_corners.x - 1)))  {
+								if ((tmp_row[j - 2] != HIGHEST_ELEVATION) && (sptr_down[j + 2] != HIGHEST_ELEVATION))  {
+									f += tmp_row[j - 2];
+									f += sptr_down[j + 2];
+									k = k + 2;
+								}
+								if ((tmp_row[j + 2] != HIGHEST_ELEVATION) && (sptr_down[j - 2] != HIGHEST_ELEVATION))  {
+									f += tmp_row[j + 2];
+									f += sptr_down[j - 2];
+									k = k + 2;
+								}
+							}
+						}
+						if ((j > 1) && (j < (image_corners.x - 1)))  {
+							if ((sptr[j - 2] != HIGHEST_ELEVATION) && (sptr[j + 2] != HIGHEST_ELEVATION))  {
+								f += sptr[j - 2];
+								f += sptr[j + 2];
+								k = k + 2;
+							}
+						}
+					}
+					if ((i > 0) && (i < image_corners.y) &&
+					    (tmp_row[j] != HIGHEST_ELEVATION) && (sptr_down[j] != HIGHEST_ELEVATION))  {
+						f += tmp_row[j];
+						f += sptr_down[j];
+						k = k + 2;
+					}
+					if (k > 1)  {
+						sptr[j] = f / (double)k;
+					}
+				}
+
+				if (j > 1)  {
+					tmp_row[j - 2] = s0;
+				}
+				s0 = s1;
+				s1 = s2;
+			}
+			tmp_row[j - 2] = s0;
+			tmp_row[j - 1] = s1;
+		}
+		free(tmp_row);
+	}
+
+
+
+	/*
+	 * If the image data has been oversampled (meaning that we have spread too little actual
+	 * DEM data over too many image pixels), then we smooth it out a little so that there isn't
+	 * a checkerboard effect from the sparse data.  The size of the smoothing kernel, and
+	 * its shape, are heuristically chosen to produce pleasing results.  However, the whole
+	 * process is inherently imperfect, so don't expect amazing results.  After all, there
+	 * really isn't any way to accurately interpolate the data that isn't there.  We are just
+	 * trying to get rid of some of the annoying artifacts of the oversampling process.  This
+	 * makes the image look better, but it does so essentially by removing some false data,
+	 * and adding new more-pleasant-looking false data to replace it.
+	 */
+	if (info_flag == 0)  {
+		if ((dem_flag != 0) && (smooth_image_flag != 0))  {
+			/*
+			 * Prepare a smoothing kernel.
+			 * The kernel is a square, a maximum of 2*SMOOTH_MAX+1 on a side.
+			 *
+			 * If a kernel element is a distance of sqrt(k*k + l*l) from the
+			 * center, then its weight is:
+			 * 	smooth[k + smooth_size][l + smooth_size] = round(10.0 * exp(- (k * k + l * l) / (2.0 * (smooth_size / 2.0) * (smooth_size / 2.0))));
+			 *
+			 * This is basically a gaussian distribution, with a mean of zero and a variance of
+			 * (smooth_size / 2.0)^2
+			 *
+			 * The parameters of the equation were chosen by trial and error.
+			 *
+			 * We choose smooth_size in the same way that we chose it above,
+			 * except that the two ratios are inverted.
+			 */
+			smooth_size = round(min3(SMOOTH_MAX, res_y_image / res_y_data, res_x_image / res_x_data));
+			if (smooth_size < 1)  {
+				/*
+				 * If the y resolution and x resolution differ,
+				 * it is possible for one to call for smoothing and the other not.
+				 * This could result in smooth_size = 0, which we don't want.
+				 * We correct that problem here.
+				 */
+				smooth_size = 1;
+			}
+			for (k = -smooth_size; k <= smooth_size; k++)  {
+				for (l = -smooth_size; l <= smooth_size; l++)  {
+					smooth[k + smooth_size][l + smooth_size] = round(10.0 * exp(- (k * k + l * l) / (2.0 * (smooth_size / 2.0) * (smooth_size / 2.0))));
+				}
+			}
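+			/*
+			 * As a rough worked example:  if smooth_size were 2, the
+			 * loops above would fill the kernel with approximately
+			 *
+			 *	 0  1  1  1  0
+			 *	 1  4  6  4  1
+			 *	 1  6 10  6  1
+			 *	 1  4  6  4  1
+			 *	 0  1  1  1  0
+			 *
+			 * so the center sample dominates the weighted average but
+			 * nearby samples still contribute.
+			 */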
+
+			/*
+			 * We need a new block of memory so that we can read the source data
+			 * from one block and write smoothed data into the other.
+			 */
+	//		get_short_array(&image_tmp, image_corners.x, image_corners.y);
+			image_tmp = (short *)malloc(sizeof(short) * (image_corners.y + 1) * (image_corners.x + 1));
+			if (image_tmp == (short *)0)  {
+				fprintf(stderr, "malloc of image_tmp failed\n");
+				exit(0);
+			}
+
+			/*
+			 * Do the smoothing.
+			 *
+			 * Slop over slightly into the areas that are set to HIGHEST_ELEVATION
+			 * so that we can fill in any remaining gaps between 7.5-minute quads.
+			 */
+			for (i = 0; i <= image_corners.y; i++)  {
+				for (j = 0; j <= image_corners.x; j++)  {
+					sum = 0;
+					sum_count = 0;
+					for (m = -smooth_size; m <= smooth_size; m++)  {
+						for (n = -smooth_size; n <= smooth_size; n++)  {
+							if (((i + m) < 0) || ((i + m) > image_corners.y) || ((j + n) < 0) || ((j + n) > image_corners.x))  {
+								continue;
+							}
+
+							sptr = (image_in + (i + m) * (image_corners.x + 1) + j + n);
+							if (*sptr == HIGHEST_ELEVATION)  {
+								continue;
+							}
+							sum += *sptr * smooth[m + smooth_size][n + smooth_size];
+							sum_count += smooth[m + smooth_size][n + smooth_size];
+						}
+					}
+					if (sum_count == 0)  {
+						*(image_tmp + i * (image_corners.x + 1) + j) = HIGHEST_ELEVATION;
+					}
+					else  {
+						*(image_tmp + i * (image_corners.x + 1) + j) = round((double)sum / (double)sum_count);
+					}
+				}
+			}
+
+			free(image_in);
+			image_in = image_tmp;
+		}
+	}
+
+
+
+	/*
+	 * If height_field_flag is non-zero, then we don't generate an image.
+	 * Instead we create a file full of height field information for use
+	 * by other programs, such as the povray ray tracer.
+	 *
+	 * The file is a Portable Graymap (PGM) format file
+	 * which is a simple ASCII dump of the elevation data.
+	 */
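+	/*
+	 * For example (with made-up numbers), a 3-pixel-wide by 2-pixel-high
+	 * height field whose largest elevation is 1500 would begin:
+	 *
+	 *	P2
+	 *	3 2 1500
+	 *	1203
+	 *	1198
+	 *	...
+	 *
+	 * with one elevation value per line, in row order.
+	 */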
+	if ((info_flag == 0) && (height_field_flag != 0))  {
+		if ((pgm_stream = fopen(output_file, "w+")) == NULL)  {
+			fprintf(stderr, "Can't create %s for writing, errno = %d\n", output_file, errno); 
+			exit(0);
+		}
+
+		/*
+		 * We need to recalculate the maximum and minimum elevations
+		 * since they may have been altered by the smoothing
+		 * operations, and we need to print out the new values.
+		 */
+		min_elevation = 100000;
+		max_elevation = -100000;
+		l = 0;
+		k = 0;
+		for (i = 0; i <= image_corners.y; i++)  { 
+			for (j = 0; j <= image_corners.x; j++)  { 
+				if (*(image_in + i * (image_corners.x + 1) + j) == HIGHEST_ELEVATION)  {
+					*(image_in + i * (image_corners.x + 1) + j) = 0;
+					k = 1;
+					continue;
+				}
+				if (*(image_in + i * (image_corners.x + 1) + j) < 0)  {
+					*(image_in + i * (image_corners.x + 1) + j) = 0;
+					l = 1;
+				}
+				if (*(image_in + i * (image_corners.x + 1) + j) > max_elevation)  {
+	                		max_elevation = *(image_in + i * (image_corners.x + 1) + j);
+	        		}
+	        		if (*(image_in + i * (image_corners.x + 1) + j) < min_elevation)  {
+	                		min_elevation = *(image_in + i * (image_corners.x + 1) + j);
+	        		}
+			}
+		}
+		fprintf(stderr, "minimum elevation = %d, maximum elevation = %d%s%s\n", min_elevation, max_elevation,
+				k != 0 ? ",\nSome points that didn't contain valid data had their elevations set to zero." : "",
+				l != 0 ? ",\nSome points with elevations below zero had their elevations set to zero." : "");
+
+		fprintf(pgm_stream, "P2\n");
+		fprintf(pgm_stream, "%d %d %d\n", image_corners.x + 1, image_corners.y + 1, max_elevation);
+
+		for (i = 0; i <= image_corners.y; i++)  { 
+			for (j = 0; j <= image_corners.x; j++)  { 
+// This print statement is for use when you want elevations normalized to 65535.
+//				fprintf(pgm_stream, "%d\n", (int)(65535.0 * (double)(*(image_in + i *
+//					(image_corners.x + 1) + j) - min_elevation) /
+//					(double)(max_elevation == min_elevation ? 0.01 : max_elevation - min_elevation)));
+				fprintf(pgm_stream, "%d\n", *(image_in + i * (image_corners.x + 1) + j));
+			}
+		}
+
+		fprintf(pgm_stream, "# Height-field map of Digital Elevation Model data.  Output by drawmap program.\n");
+		fprintf(pgm_stream, "# %g %g %g %g Latitude/longitude of southeast and northwest corners\n",
+					image_corners.sw_lat, image_corners.ne_long,
+					image_corners.ne_lat, image_corners.sw_long);
+		fprintf(pgm_stream, "# %d %d Minimum and maximum elevations%s%s\n",
+					min_elevation, max_elevation,
+					k != 0 ? "\n# Some points that didn't contain valid data had their elevations set to zero." : "",
+					l != 0 ? "\n# Some points with elevations below zero had their elevations set to zero." : "");
+
+		fclose(pgm_stream);
+
+		/*
+		 * Produce a povray texture map, suitable for use
+		 * with the height-field data.
+		 */
+		gen_texture(min_elevation, max_elevation, color_tab, output_file);
+
+		exit(0);
+	}
+	if ((info_flag == 0) && (dem_flag != 0))  {
+		fprintf(stderr, "minimum elevation = %ld, maximum elevation = %ld\n", min_elevation, max_elevation);
+	}
+
+
+
+	/*
+	 * Get memory for the actual output image.  We need space for the map itself,
+	 * and for the white borders that go around it.  Note that the code indexes
+	 * through the map portion of the image area as though there were no borders
+	 * around the map.  Then, when the indexes are actually used to index into
+	 * the image_corners.ptr array, the code adds the border widths to the index values
+	 * to arrive at true indices.  This makes the code look messy, but it allows
+	 * us to separate the task of navigating around the map area from the task
+	 * of navigating around the output image area.  This makes it easier (for me)
+	 * to understand what is going on.  On the image, the x index increases toward
+	 * the right and the y index increases going toward the bottom.  latitude
+	 * the right and the y index increases going toward the bottom.  Latitude
+	 * left to right.  (Remember, though, that west longitudes are negative,
+	 * so that 109W is actually smaller than 108W, when treated as -109 and -108.)
+	 *
+	 * The index values (that is, the unbordered-map area index values), in the "y" and
+	 * "x" directions, can each be -1 (when the latitude goes to image_corners.ne_lat or
+	 * the longitude goes to image_corners.sw_long, respectively).
+	 * We allow this negative index because it makes it conceptually easier to
+	 * translate latitudes and longitudes into x and y index values.
+	 * We depend on the borders around the image to absorb these negative values
+	 * so that we don't scribble memory outside the memory assigned to image_corners.ptr.
+	 *
+	 *       Example:
+	 *	 Say that we have an image that covers a 1x1 degree block, and a map area of
+	 *	 1200x1200 pixels.  Thus, we have a map area 1200x1200 pixels in extent, but
+	 *       we have actual DEM data that extends over 1201x1201 samples.  (250K DEM files
+	 *       are mostly 1201x1201 samples in extent.)  Obviously, some
+	 *       of the available data won't fit on the image.  One could just make the image area
+	 *       be 1201x1201 pixels wide, but I chose a different approach.  I make the map area
+	 *       span slightly less than 1 degree by 1 degree, so that some of the DEM data won't
+	 *       fit on the image.   This is accomplished by aligning the DEM data with two edges
+	 *       of the image, and letting the DEM data that slop over the other two edges be discarded.
+	 *
+	 *       It makes sense that we assign image_corners.sw_lat to pixels along
+	 *       the bottom edge of the map area.  It would also make esthetic sense to put
+	 *       image_corners.sw_long along the pixels that run down the left side of the map area.
+	 *       However, early in development (when I was still treating west longitudes as
+	 *       positive numbers) I decided to put image_corners.ne_long down the right-hand side of
+	 *       the map area. (When I was treating west longitude as a positive number, the
+	 *       roles of image_corners.sw_long and image_corners.ne_long were reversed.)  I decided not
+	 *       to change this convention when I started using negative longitudes.
+	 *       Thus, in the current program, the point represented by image_corners.sw_lat/image_corners.ne_long
+	 *       is exactly the bottom right-hand corner of the map area, with map-area (x, y)
+	 *       index values of (1199, 1199).  image_corners.ne_lat/image_corners.sw_long is just outside the upper
+	 *       left corner of the map area, with map-area (x, y) index values of (-1, -1).
+	 *
+	 *	 If you think about it, it makes sense that image_corners.ne_lat and image_corners.sw_long are actually
+	 *	 outside the image space.  Adjacent DEM files overlap by the width of one elevation sample
+	 *       along their common boundary.  Thus, it is natural to assign the boundary to one of the 1x1
+	 *       degree blocks but not to the other.  I have chosen to include the boundaries along the
+	 *       bottom and right-hand sides of each DEM block, and to assign the other two boundaries to
+	 *       adjacent DEM blocks.  Thus, if you display only a single DEM block, the left-hand and top
+	 *       edges of the DEM data won't appear, since they fall at index values of -1.  Actually, they
+	 *       wouldn't appear in any case.  The raw DEM data is converted into image data by cycling
+	 *       through all of the DEM points and finding the elevation gradient at each point by taking
+	 *       differences between adjacent elevation samples.  This gradient operation reduces the
+	 *       1201x1201 DEM array to a 1200x1200 set of image points.  Thus, the gradient operation
+	 *       gobbles up either the top or bottom row, and either the left or right column, of the raw
+	 *       DEM data.  I arbitrarily chose to gobble up the top row and left column.  The resulting
+	 *       array of image points exactly fits into the available 1200x1200 image array.  Thus, two
+	 *       edges of the DEM block would naturally fall on the negative index values, but they don't
+	 *       because we discard them in the gradient operation.
+	 *
+	 *       So why make such a fuss about the negative index values, since they don't get used
+	 *       anyway?  The answer lies in the handling of DLG data.  Those data aren't array-based
+	 *       the way the DEM data are.  Instead, they are vector data --- that is, they are sequences
+	 *       of points that define piecewise-linear "curves" (like roads, streams, and so on) on the
+	 *       map.  The points are given in terms of UTM coordinates, but we convert them to
+	 *       latitude/longitude coordinates for entry on the map.  (I could have defined the map area
+	 *       in terms of UTM coordinates instead of latitude/longitude coordinates, but latitudes and
+	 *       longitudes seemed more natural --- particularly since a map could span multiple UTM zones,
+	 *       which could get confusing.)  The subroutines that plot DLG data make use of the negative
+	 *       index values, since roads and such can go right up to the edge of the map.  Thus, when
+	 *       all of the DLG data are plotted, there will generally be roads and streams that overlap
+	 *       the two white strips represented by the negative index values.  To clean up these two
+	 *       strips, we fill them in with WHITE after the DLG plotting is done.
+	 *
+	 *	 If you look closely at a map for a 1 degree by 1 degree block, you
+	 *	 will note that the tick marks for the low longitude and high latitude
+	 *	 are actually one pixel beyond the edge of the map area, at the locations
+	 *	 that would be specified by horizontal and vertical indices of -1.
+	 */
+	if (info_flag == 0)  {
+		x_prime = image_corners.x + LEFT_BORDER + right_border;
+		image_corners.ptr = (unsigned char *)malloc((image_corners.y + TOP_BORDER + bottom_border) * x_prime);
+		if (image_corners.ptr == (unsigned char *)0)  {
+			fprintf(stderr, "malloc of image_corners.ptr failed\n");
+			exit(0);
+		}
+	}
+
+
+
+	/*
+	 * For areas of flat terrain, most of the color table goes unused,
+	 * and the shaded relief is pretty boring, with only a few colors
+	 * (or even only a single color) and not much shading.  When the
+	 * "-z" option is given by the user, we adjust the elevation
+	 * thresholds in the color table so that the full color table is
+	 * used to span the elevations between min_elevation and max_elevation.
+	 */
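+	/*
+	 * As an illustration (the band count here is just an assumed value):
+	 * with 16 valid color bands and elevations running from 1000m to
+	 * 3000m, the thresholds become 1000 + round((k + 1) * 2000 / 16),
+	 * i.e. 1125, 1250, 1375, and so on, with the last band left at
+	 * HIGHEST_ELEVATION as a catch-all.
+	 */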
+	if (z_flag != 0)  {
+		for (k = 0; k < (MAX_VALID_BANDS - 1); k++)  {
+			i = min_elevation < 0 ? 0 : min_elevation;
+			color_tab[k].max_elevation = i + round((double)((k + 1) * (max_elevation - i)) / (double)MAX_VALID_BANDS);
+		}
+		color_tab[MAX_VALID_BANDS - 1].max_elevation = HIGHEST_ELEVATION;
+	}
+
+
+	/*
+	 * Do the big calculation for processing the DEM data.
+	 *
+	 * This is where we transform elevation data into pixel colors,
+	 * or elevation contours, in the output image.
+	 *
+	 * Note that the zeroeth row and zeroeth column of the elevation data
+	 * (in image_in) are discarded during this process.  They consist of the
+	 * data that would be plotted at the -1 horizontal and vertical index values
+	 * in image_corners.ptr.
+	 */
+	if (info_flag == 0)  {
+		if (contour_flag == 0)  {
+			/*
+			 * Produce a shaded relief map.
+			 */
+			for (i = 1; i <= image_corners.y; i++)  {
+				for (j = 1; j <= image_corners.x; j++)  {
+					/*
+					 * When producing shaded relief, we vary the shade of the DEM data to
+					 * correspond to the gradient of the terrain at each point.  The gradient
+					 * calculations determine the slope in two directions and choose the
+					 * larger of the two.
+					 *
+					 * The basic idea is to assume that the sun is shining from the northwest
+					 * corner of the image.  Then, terrain with a negative gradient (toward
+					 * the northwest or west) will be brightly colored, terrain with a
+					 * positive gradient will be dark, and level terrain will be somewhere in between.
+					 *
+					 * In order to find the gradient, the numerator is the difference in elevation
+					 * between two adjacent pixels.  The denominator is the horizontal ground distance
+					 * between the locations represented by those two pixels.  In the DEM data,
+					 * elevations are expressed in meters.  We also need to find the ground distance
+					 * in meters.
+					 *
+					 * We can readily find the ground distance in terms of degrees (of latitude/longitude)
+					 * per pixel.  We do that now, using the geometry of the target image.
+					 * (Note that this calculation is pretty bogus, because we are treating latitudes
+					 * and longitudes as rectangular coordinates.  However, we only need a crude
+					 * result since the goal is to produce color shadings that give a subjective
+					 * view of the gradient of the terrain.  We aren't trying to make the shadings
+					 * correspond exactly to some gradient metric --- we are only trying to give the
+					 * impression of a gradient.)
+					 */
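+					/*
+					 * As a rough illustrative example:  if two adjacent samples
+					 * are 90 meters apart on the ground and differ by 30 meters
+					 * in elevation, the gradient magnitude computed below is
+					 * 30 / 90 = 0.33; relief_mag scales that value before
+					 * get_factor() converts it into a shade adjustment.
+					 */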
+					res_y = (double)(image_corners.ne_lat - image_corners.sw_lat) / (double)image_corners.y;
+					res_xy = sqrt((pow(image_corners.ne_lat - image_corners.sw_lat, 2.0) + pow(image_corners.ne_long - image_corners.sw_long, 2.0)) /
+							    (pow((double)image_corners.x, 2.0) + pow((double)image_corners.y, 2.0)));
+
+					/*
+					 * Now we need to convert our ground distance, in degrees per pixel,
+					 * into a distance in meters per pixel.  This requires that we know
+					 * how many meters a degree of latitude or longitude represents.
+					 *
+					 * 4.0076594e7 meters is the equatorial circumference of the earth.
+					 * 3.9942e7 meters is the polar circumference of the earth.
+					 *
+					 * Thus, along the equator, there are 1.1132e5 meters per degree.
+					 * Along a line of longitude, there are 1.1095e5 meters per degree.
+					 * (The Earth has a slightly irregular shape, so these numbers are to
+					 * a first approximation only.)
+					 * The latter number should be reasonably accurate for any latitude,
+					 * anywhere on the earth.  The former number is only accurate near
+					 * the equator.  As we move further north or south, the number changes
+					 * according to the cosine of the latitude:  1.1132e5 * cos(latitude).
+					 *
+					 * Thus, we need to multiply res_y by 1.1095e5 to get the resolution
+					 * in terms of meters per pixel.  For res_xy, we use a more complicated
+					 * factor:
+					 *    sqrt((1.1095e5)^2 + (1.1132e5 * cos(latitude))^2)
+					 */
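+					/*
+					 * For example, at 60 degrees north, cos(60) = 0.5, so a
+					 * degree of longitude spans only about 1.1132e5 * 0.5,
+					 * or roughly 55,660 meters, while a degree of latitude
+					 * still spans about 110,950 meters.
+					 */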
+					res_y *= 1.1095e5;
+					/*
+					 * f is the latitude (in degrees), found by interpolation.
+					 * We still need to convert it to radians, which we do inside
+					 * the cosine function call.
+					 */
+					f = image_corners.ne_lat - ((double)i / (double)image_corners.y) * (image_corners.ne_lat - image_corners.sw_lat);
+					res_xy *= sqrt(pow(1.1095e5, 2.0) + pow(1.1132e5 * cos(f * M_PI / 180.0), 2.0));
+
+					/*
+					 * Now we are ready to find the gradients.
+					 * However, if we are at the edge of the image and one or more of the
+					 * gradient points is invalid, then don't find the gradient.
+					 * Just set that point in the map image to WHITE.
+					 */
+					sptr = image_in + (i - 1) * (image_corners.x + 1) + j;
+					sptr2 = image_in + i * (image_corners.x + 1) + j;
+					if ((*sptr == HIGHEST_ELEVATION) || (*(sptr - 1) == HIGHEST_ELEVATION) ||
+					    (*sptr2 == HIGHEST_ELEVATION))  {
+						*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = WHITE;
+						continue;
+					}
+					else  {
+						gradient1 = (((double)*(sptr - 1)) - ((double)*sptr2)) / res_xy;
+						gradient2 = (((double)*sptr) - ((double)*sptr2)) / res_y;
+						gradient3 = -10000000000.0;
+						gradient = relief_mag * max3(gradient1, gradient2, gradient3);
+
+						factor = get_factor(gradient);
+//						histogram[factor]++;	/* Information for debugging. */
+					}
+
+
+					/*
+					 * Set the color based on the elevation and the factor
+					 * retrieved from the gradient calculations.
+					 * This is called a "factor" for historical reasons.
+					 * At one time, I experimented with finding a multiplicative
+					 * factor instead of the current additive modifier.
+					 * It wasn't worth going through the code and changing the name.
+					 * Besides, I might want to try a factor again someday.
+					 *
+					 * See the file "colors.h" for a description of the color
+					 * scheme.  The information is collected there so that it
+					 * is easy to change the color scheme, if desired.
+					 *
+					 * We do a few special cases and then launch into a loop to
+					 * check the bulk of the cases.
+					 */
+					if (*(sptr = image_in + i * (image_corners.x + 1) + j) < 0)  {
+						/*
+						 * Elevations can theoretically be less than 0, but it's unusual, so report it.
+						 * Below sea level, we shade everything with CYAN.
+						 */
+//						fprintf(stderr, "An elevation was less than 0:  %d\n", *(image_in + i * (image_corners.x + 1) + j));
+
+						*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = c_index_sea + factor;
+					}
+					else if (*sptr == 0)  {
+						/*
+						 * Special case for sea level.  If things are totally flat,
+						 * assume it's water.  Otherwise treat it like it's Death Valley.
+						 *
+						 * The reason for this special case is that the DLG files for coastal regions
+						 * don't appear to treat oceans as bodies of water.  This was resulting
+						 * in the ocean areas being set to GREEN (the normal color for sea-level land).
+						 * Thus, I kludged in this special check; and it appears to work fine, in general.
+						 *
+						 * I later made it an option since, for example, sacramento-w.gz gets colored
+						 * oddly, because there are areas below sea level within areas that meet the
+						 * criterion for ocean.
+						 */
+						if (seacoast_flag != 0)  {
+							if (gradient == 0.0)  {
+								*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = B_BLUE;
+							}
+							else  {
+								*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = c_index_sea + factor;
+							}
+						}
+						else  {
+							*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = C_INDEX_0 + factor;
+						}
+					}
+					else if (*sptr == HIGHEST_ELEVATION)  {
+						/*
+						 * Special case for creating WHITE areas by setting the
+						 * DEM elevation data to exactly HIGHEST_ELEVATION.
+						 */
+						*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = WHITE;
+					}
+					else  {
+						for (k = 0; k < MAX_VALID_BANDS; k++)  {
+							if (*sptr <= color_tab[k].max_elevation)  {
+								*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = color_tab[k].c_index + factor;
+								break;
+							}
+						}
+					}
+				}
+			}
+		}
+		else  {
+			/*
+			 * Instead of a shaded relief map, produce a contour map.
+			 * Note that some regions have hypsographic DLG files
+			 * that can be used to produce a contour map.  However, these
+			 * tend to be too dense for my taste, and it seems easier to produce
+			 * a contour map from scratch than to try to winnow out the
+			 * relevant chunks from a DLG file.  Producing the contour maps
+			 * from scratch has the added advantage that it works even if
+			 * there is no DLG hypsography data available.
+			 */
+
+			/*
+			 * In this pair of nested loops, we truncate all of the elevation
+			 * data down to the nearest contour interval at or below it.
+			 */
+			for (i = 0; i <= image_corners.y; i++)  {
+				for (j = 0; j <= image_corners.x; j++)  {
+					contour_trunc = floor((double)*(image_in + i * (image_corners.x + 1) + j) / contour_intvl);
+					*(image_in + i * (image_corners.x + 1) + j) = (short)round(ceil(contour_trunc * (double)contour_intvl));
+				}
+			}
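+			/*
+			 * For example, with an (assumed) contour interval of 50 meters,
+			 * elevations from 1200 through 1249 all become 1200, and 1250
+			 * starts the next contour band.
+			 */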
+
+
+			/*
+			 * In this pair of nested loops, we use the rounded elevation
+			 * data to produce a set of contours.  The algorithm is simple:
+			 * If the elevation at the center of a 3x3 square is greater than
+			 * at any of the locations on the border of the square, then we
+			 * plot an L_ORANGE contour point.  Otherwise, we make the point WHITE
+			 * (if capital_c_flag==0), or set the point to a color from the color table
+			 * (if capital_c_flag!=0) where the colors are chosen by rotation.
+			 */
+			for (i = 1; i < image_corners.y; i++)  {
+				for (j = 1; j < image_corners.x; j++)  {
+					k = *(image_in + (i    ) * (image_corners.x + 1) + j    );
+
+					if ((k > (*(image_in + (i - 1) * (image_corners.x + 1) + j - 1))) ||
+					    (k > (*(image_in + (i - 1) * (image_corners.x + 1) + j    ))) ||
+					    (k > (*(image_in + (i - 1) * (image_corners.x + 1) + j + 1))) ||
+					    (k > (*(image_in + (i    ) * (image_corners.x + 1) + j - 1))) ||
+					    (k > (*(image_in + (i    ) * (image_corners.x + 1) + j + 1))) ||
+					    (k > (*(image_in + (i + 1) * (image_corners.x + 1) + j - 1))) ||
+					    (k > (*(image_in + (i + 1) * (image_corners.x + 1) + j    ))) ||
+					    (k > (*(image_in + (i + 1) * (image_corners.x + 1) + j + 1))))  {
+						*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = L_ORANGE;
+					}
+					else  {
+						if (capital_c_flag == 0)  {
+							*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = WHITE;
+						}
+						else  {
+							/*
+							 * We divide by (MAX_VALID_BANDS-1), rather than by MAX_VALID_BANDS,
+							 * so as to exclude the color in slot MAX_VALID_BANDS.
+							 * Since this color is normally bright white, which
+							 * can be a bit intrusive, we exclude it on esthetic grounds.
+							 */
+							k = round(floor((double)k / contour_intvl)) % (MAX_VALID_BANDS - 1);
+							*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = color_tab[k].c_index;
+						}
+					}
+				}
+			}
+
+			/*
+			 * Set the pixels along the right side and bottom of the image to WHITE.
+			 * They have not yet had a color defined.
+			 *
+			 * Note that this is only half a loaf.  If the DEM data supplied by the user
+			 * doesn't cover the latitude/longitude area spanned by the image, then there
+			 * will be WHITE areas within the image.  Unfortunately, there will normally
+			 * be incorrect contour lines around the boundaries of these WHITE areas because
+			 * the elevation in each WHITE area is initialized to HIGHEST_ELEVATION.  Thus,
+			 * there will be a discontinuity in elevation between areas with valid DEM
+			 * data and areas without valid DEM data.  This produces a contour line
+			 * at the boundary.  This is a bummer, but it is a minor cosmetic bummer,
+			 * and I'm not in the mood to fix it at this time.
+			 */
+			for (i = 0; i <= image_corners.y; i++)  {
+				*(image_corners.ptr + (i - 1 + TOP_BORDER) * x_prime + image_corners.x - 1 + LEFT_BORDER) = WHITE;
+			}
+			for (j = 0; j <= image_corners.x; j++)  {
+				*(image_corners.ptr + (image_corners.y - 1 + TOP_BORDER) * x_prime + j - 1 + LEFT_BORDER) = WHITE;
+			}
+		}
+		free(image_in);
+	}
+
+
+	/*
+	 * Process any DLG files.
+	 * These files contain line and area information, for drawing
+	 * things like streams, roads, boundaries, lakes, and such.
+	 */
+	file_index = 0;
+	while (file_index < num_dlg)  {
+		length = strlen(argv[optind + file_index]);
+
+		if ((length > 3) && ((strcmp(&argv[optind + file_index][length - 3], ".gz") == 0) ||
+		    (strcmp(&argv[optind + file_index][length - 3], ".GZ") == 0)))  {
+			gz_flag = 1;
+		}
+		else  {
+			gz_flag = 0;
+		}
+
+		/*
+		 * Files in Spatial Data Transfer System (SDTS) format are markedly
+		 * different from the optional-format DLG files.
+		 *
+		 * Since SDTS files are so different, we must detect them and handle
+		 * them separately.
+		 *
+		 * We insist that the user specify one, single, SDTS file on the command
+		 * line for each SDTS DLG directory.  The file must be the one whose
+		 * name has the form ????LE??.DDF (or ????le??.ddf), and it may have
+		 * a .gz on the end if it is gzip compressed.
+		 *
+		 * We allow the files to be gzip-compressed, and they can have either
+		 * ".gz" or ".GZ" on the end.  However, we insist that the rest of
+		 * the file name have consistent case.  That is, if the 'f' or 'F'
+		 * in the ".DDF" or ".ddf" is in a given case, the rest of the file name
+		 * had better be in that same case.
+		 *
+		 * If the following "if" test succeeds, we assume we have an SDTS file.
+		 */
+		if (((length >= 15) && (gz_flag != 0) &&
+		     ((strncmp(&argv[optind + file_index][length - 7], ".ddf", 4) == 0) ||
+		      (strncmp(&argv[optind + file_index][length - 7], ".DDF", 4) == 0))) ||
+		    ((length >= 12) && (gz_flag == 0) &&
+		     ((strcmp(&argv[optind + file_index][length - 4], ".ddf") == 0) ||
+		      (strcmp(&argv[optind + file_index][length - 4], ".DDF") == 0))))  {
+			/* SDTS file */
+
+			/*
+			 * Check that the file name takes the form that we expect.
+			 */
+			if (((gz_flag != 0) &&
+			     (strncmp(&argv[optind + file_index][length - 11], "le", 2) != 0) &&
+			     (strncmp(&argv[optind + file_index][length - 11], "LE", 2) != 0)) ||
+			    ((gz_flag == 0) &&
+			     (strncmp(&argv[optind + file_index][length - 8], "le", 2) != 0) &&
+			     (strncmp(&argv[optind + file_index][length - 8], "LE", 2) != 0)))  {
+				fprintf(stderr, "The file %s looks like an SDTS file, but the name doesn't look right.  Ignoring file.\n", argv[optind + file_index]);
+				file_index++;
+				continue;
+			}
+
+			/* If info_flag is nonzero, then just print some info about the DLG file. */
+			if (info_flag == 0)  {
+				fprintf(stderr, "Processing DLG file:  %s\n", argv[optind + file_index]);
+			}
+			else  {
+				fprintf(stdout, "%s", argv[optind + file_index]);
+			}
+
+			/*
+			 * The file name looks okay.  Let's launch into the information parsing.
+			 */
+			(void)process_dlg_sdts(argv[optind + file_index], (char *)0, gz_flag, &image_corners, info_flag, 0);
+		}
+		else  {
+			/* Not an SDTS file. */
+
+			if (gz_flag != 0)  {
+				if ((dlg_fdesc = buf_open_z(argv[optind + file_index], O_RDONLY)) < 0)  {
+					fprintf(stderr, "Can't open %s for reading, errno = %d\n", argv[optind + file_index], errno);
+					exit(0);
+				}
+			}
+			else  {
+				if ((dlg_fdesc = buf_open(argv[optind + file_index], O_RDONLY)) < 0)  {
+					fprintf(stderr, "Can't open %s for reading, errno = %d\n", argv[optind + file_index], errno);
+					exit(0);
+				}
+			}
+
+			/* If info_flag is nonzero, then just print some info about the DLG file. */
+			if (info_flag == 0)  {
+				fprintf(stderr, "Processing DLG file:  %s\n", argv[optind + file_index]);
+			}
+			else  {
+				fprintf(stdout, "%s", argv[optind + file_index]);
+			}
+
+			/*
+			 * With the DEM files, we parsed the header first, and then
+			 * called a separate processing function, and then did some
+			 * more processing here in the main body of drawmap.  DLG files are
+			 * more complicated to parse, and we don't need to return any DLG
+			 * data to this main processing loop.  Thus, we just encapsulate
+			 * all parsing and processing into a single function call.
+			 */
+			process_dlg_optional(dlg_fdesc, gz_flag, &image_corners, info_flag);
+
+			if (gz_flag == 0)  {
+				buf_close(dlg_fdesc);
+			}
+			else  {
+				buf_close_z(dlg_fdesc);
+			}
+		}
+
+		file_index++;
+	}
+	if (info_flag != 0)  {
+		exit(0);
+	}
+
+
+	/* Select a font size, based on the image size. */
+	if ((image_corners.x >= 1000) && (image_corners.y >= 1000))  {
+		font_width = 6;
+		font_height = 10;
+		font = &font_6x10[0][0];
+	}
+	else  {
+		font_width = 5;
+		font_height = 8;
+		font = &font_5x8[0][0];
+	}
+
+
+	/*
+	 * Process any GNIS data.
+	 * GNIS data consists of place names, with specific latitude/longitude
+	 * coordinates, and other data.  We put a cursor at each given location
+	 * and add the place name beside it.
+	 */
+	if (gnis_file != (char *)0)  {
+		if (strcmp(gnis_file + strlen(gnis_file) - 3, ".gz") == 0)  {
+			gz_flag = 1;
+			if ((gnis_fdesc = buf_open_z(gnis_file, O_RDONLY)) < 0)  {
+				fprintf(stderr, "Can't open %s for reading, errno = %d\n", gnis_file, errno);
+				exit(0);
+			}
+		}
+		else  {
+			gz_flag = 0;
+			if ((gnis_fdesc = buf_open(gnis_file, O_RDONLY)) < 0)  {
+				fprintf(stderr, "Can't open %s for reading, errno = %d\n", gnis_file, errno);
+				exit(0);
+			}
+		}
+
+		fprintf(stderr, "Processing GNIS file:  %s\n", gnis_file);
+
+		while ( 1 )  {
+			/*
+			 * There are two kinds of GNIS files at the http://mapping.usgs.gov/
+			 * web site.  I call them old-style and new-style, because for years the
+			 * old-style files were all that were available; and then, in 1998, the new-style
+			 * files appeared as well.  In the old-style files each record is fixed length
+			 * (147 bytes with a newline, or 148 bytes with a newline and carriage return).
+			 * The fields are at fixed positions within this record, with white-space padding
+			 * between the fields.  Here is a sample (I added the <> delimiters at the beginning
+			 * and end of the record):
+			 *
+			 * <MT Blue Mountain Saddle                               locale    Missoula        464828N 1141302W                    5640 Blue Mountain             >
+			 *
+			 * New-style records are similar, but have delimiters of the form "," as shown in
+			 * this sample:
+			 *
+			 * <"MT","Blue Mountain Saddle","locale","Missoula","30","063","464828N","1141302W","46.80778","-114.21722","","","","","5640","","Blue Mountain">
+			 *
+			 * We attempt to handle both formats here.
+			 *
+			 *
+			 * HISTORICAL NOTE:
+			 * Apparently, early in 2000 (although I am not sure exactly when), the format of both
+			 * the old and new style GNIS files changed.  Here (simply for historical completeness)
+			 * are some samples of the pre-change versions:
+			 * <Blue Mountain Saddle                                                                                locale   Missoula                           30063464828N1141302W                 5640Blue Mountain                                        >
+			 * <Blue Mountain Saddle','locale','Missoula','30','063','464828N','1141302W','46.80778','-114.21722','','','','','5640','','Blue Mountain                                                                                                               >
+			 * Beginning with drawmap version 1.10, these older versions are no longer handled.
+			 */
+			if (gz_flag == 0)  {
+				if ((ret_val = get_a_line(gnis_fdesc, buf, MAX_GNIS_RECORD - 1)) <= 0)  {
+					break;
+				}
+			}
+			else  {
+				if ((ret_val = get_a_line_z(gnis_fdesc, buf, MAX_GNIS_RECORD - 1)) <= 0)  {
+					break;
+				}
+			}
+			buf[ret_val] = '\0';
+			/* Strip off trailing CR and/or LF */
+			if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r'))  {
+				ret_val--;
+				buf[ret_val] = '\0';
+			}
+			if ((buf[ret_val - 1] == '\n') || (buf[ret_val - 1] == '\r'))  {
+				ret_val--;
+				buf[ret_val] = '\0';
+			}
+
+			/*
+			 * We need to figure out whether it is an old-style or new-style record.
+			 */
+			if ((tok_ptr = strstr(buf, "\",\"")) != (unsigned char *)0)  {
+				/* New-style record. */
+				if ((tok_ptr + 3) < (buf + ret_val))  {
+					tok_ptr += 3;
+					gnis_feature_name = tok_ptr;
+				}
+				else  {
+					fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+					continue;
+				}
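+				/*
+				 * Skipping over seven more "," delimiters, starting from
+				 * the feature name, lands us on the decimal latitude field
+				 * ("46.80778" in the sample record above); one further skip,
+				 * below, yields the decimal longitude ("-114.21722").
+				 */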
+				for (i = 0; i < 7; i++)  {
+					if (((tok_ptr = strstr(tok_ptr, "\",\"")) != (unsigned char *)0) && (*tok_ptr != '\0'))  {
+						if (i == 0)  {
+							/*
+							 * Capture the feature name for later use.
+							 * Skip over the state name at the front.
+							 */
+							length = tok_ptr - gnis_feature_name;
+						}
+						if ((tok_ptr + 3) < (buf + ret_val))  {
+							tok_ptr += 3;
+						}
+						else  {
+							break;
+						}
+					}
+					else  {
+						break;
+					}
+				}
+				if (i != 7)  {
+					/*
+					 * If i != 7, then we ran out of data before finding
+					 * the latitude.  Skip the record.
+					 */
+					fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+					continue;
+				}
+				latitude = atof(tok_ptr);
+				if (((tok_ptr = strstr(tok_ptr, "\",\"")) != (unsigned char *)0) && (*tok_ptr != '\0') && (*(tok_ptr + 3) != '\0'))  {
+					tok_ptr += 3;
+					longitude = atof(tok_ptr);
+				}
+				else  {
+					fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+					continue;
+				}
+			}
+			else  {
+				/* Old-style record. */
+				if (ret_val < 96)  {
+					/* The record is too short to process.  Ignore it. */
+					fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+					continue;
+				}
+
+				/*
+				 * Capture the feature name for later use.
+				 * Begin by skipping over the state name at the front.
+				 */
+				gnis_feature_name = buf;
+				while (*gnis_feature_name != ' ')  gnis_feature_name++;
+				while (*gnis_feature_name == ' ')  gnis_feature_name++;
+
+				/* Work backwards from the end of the field to remove trailing blanks. */
+				for (length = 53; length >= 0; length--)  {
+					if (buf[length] != ' ')  {
+						break;
+					}
+				}
+				length++;
+				length = length - (gnis_feature_name - buf);
+
+				/*
+				 * Note:  We assume latitude_low, longitude_low, latitude_high, and longitude_high
+				 * were entered in decimal degrees.
+				 * latitude and longitude from the old-style GNIS files, however, are in DDDMMSS format, and
+				 * require special conversion functions.
+				 */
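+				/*
+				 * For example, the latitude field "464828N" in the sample
+				 * record above represents 46 degrees, 48 minutes, 28 seconds
+				 * north, i.e. 46 + 48/60 + 28/3600 = 46.80778 degrees, the
+				 * same value that appears in decimal form in the new-style
+				 * sample record.
+				 */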
+				if ((buf[86] != 'N') && (buf[86] != 'S'))  {
+					/* Defective record */
+					fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+					continue;
+				}
+				if ((buf[95] != 'E') && (buf[95] != 'W'))  {
+					/* Defective record */
+					fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+					continue;
+				}
+				latitude = lat_conv(&buf[80]);
+				longitude = lon_conv(&buf[88]);
+			}
+
+			/* Ignore this entry if it is out of the map area. */
+			if ((latitude < image_corners.sw_lat) || (latitude > image_corners.ne_lat))  {
+				continue;
+			}
+			if ((longitude < image_corners.sw_long) || (longitude > image_corners.ne_long))  {
+				continue;
+			}
+
+			/* draw a cursor at the specified point */
+			xx = - 1 + round((longitude - image_corners.sw_long) * (double)image_corners.x / (image_corners.ne_long - image_corners.sw_long));
+			yy = image_corners.y - 1 - round((latitude - image_corners.sw_lat) * (double)image_corners.y / (image_corners.ne_lat - image_corners.sw_lat));
+
+			a = WHITE;
+			for (i = -3; i <= 3; i++)  {
+				if (((xx + i) >= 0) && ((xx + i) <= (image_corners.x - 1)))  {
+					if (*(image_corners.ptr + (yy + TOP_BORDER) * x_prime + xx + LEFT_BORDER + i) == WHITE)  {
+						a = BLACK;
+						break;
+					}
+				}
+				if (((yy + i) >= 0) && ((yy + i) <= (image_corners.y - 1)))  {
+					if (*(image_corners.ptr + (yy + TOP_BORDER + i) * x_prime + xx + LEFT_BORDER) == WHITE)  {
+						a = BLACK;
+						break;
+					}
+				}
+			}
+			for (i = -3; i <= 3; i++)  {
+				if (((xx + i) >= 0) && ((xx + i) <= (image_corners.x - 1)))  {
+					*(image_corners.ptr + (yy + TOP_BORDER) * x_prime + xx + LEFT_BORDER + i) = a;
+				}
+				if (((yy + i) >= 0) && ((yy + i) <= (image_corners.y - 1)))  {
+					*(image_corners.ptr + (yy + TOP_BORDER + i) * x_prime + xx + LEFT_BORDER) = a;
+				}
+			}
+
+			/* If there was a feature name, then put it into the image near the cursor */
+			if (length > 0)  {
+				if ((xx + 5 + length * font_width) >= image_corners.x)  {
+					start_x = xx - 5 - length * font_width;
+				}
+				else  {
+					start_x = xx + 5;
+				}
+				if ((yy + (font_height >> 1) - 1) >= image_corners.y)  {
+					start_y = image_corners.y - font_height;
+				}
+				else if ((yy - (font_height >> 1)) < 0)  {
+					start_y = 0;
+				}
+				else  {
+					start_y = yy - (font_height >> 1);
+				}
+
+				gnis_feature_name[length] = '\0';
+				add_text(&image_corners, gnis_feature_name, length, start_x + LEFT_BORDER,
+					 start_y + TOP_BORDER, font, font_width, font_height, WHITE, -1);
+			}
+		}
+		if (gz_flag == 0)  {
+			buf_close(gnis_fdesc);
+		}
+		else  {
+			buf_close_z(gnis_fdesc);
+		}
+	}
+
+
+	/*
+	 * Put a white border around the edges of the output image.
+	 * Note that this will cover up the one-pixel slop over the left
+	 * and top edges that is the result of the fact that we
+	 * set the latitude and longitude to whole-number values, while
+	 * the pixels don't quite cover that whole area.
+	 * This was discussed at length in a previous comment.
+	 *
+	 * Note that the DEM file data don't slop over the edges because,
+	 * when we process them, they are already in the form of an array of
+	 * points, and we can cleanly discard the data we don't need.
+	 * However, the DLG and GNIS data are in the form of
+	 * latitude/longitude or UTM grid coordinates, and it is possible
+	 * for array index values of -1 to crop up at the image edges.
+	 * (In the case of GNIS data, we explicitly check for this and
+	 * don't slop over.  For DLG data, we don't bother because it is
+	 * cheaper in CPU time to just null out the border here.)
+	 */
+	for (i = 0; i < TOP_BORDER; i++)  {
+		for (j = 0; j < (image_corners.x + LEFT_BORDER + right_border); j++)  {
+			*(image_corners.ptr + i * x_prime + j) = WHITE;
+		}
+	}
+	for (i = image_corners.y + TOP_BORDER; i < (image_corners.y + TOP_BORDER + bottom_border); i++)  {
+		for (j = 0; j < (image_corners.x + LEFT_BORDER + right_border); j++)  {
+			*(image_corners.ptr + i * x_prime + j) = WHITE;
+		}
+	}
+	for (i = TOP_BORDER; i < (image_corners.y + TOP_BORDER); i++)  {
+		for (j = 0; j < LEFT_BORDER; j++)  {
+			*(image_corners.ptr + i * x_prime + j) = WHITE;
+		}
+	}
+	for (i = TOP_BORDER; i < (image_corners.y + TOP_BORDER); i++)  {
+		for (j = image_corners.x + LEFT_BORDER; j < (image_corners.x + LEFT_BORDER + right_border); j++)  {
+			*(image_corners.ptr + i * x_prime + j) = WHITE;
+		}
+	}
+
+
+	/*
+	 * Add a copyright notice to the image if, when the program was compiled, the
+	 * makefile contained a non-null COPYRIGHT_NAME.
+	 */
+	if (COPYRIGHT_NAME[0] != '\0')  {
+		time_val = time((time_t *)0);
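+		/*
+		 * ctime() returns a fixed-format string such as
+		 * "Fri May 18 12:29:03 2007\n", so an offset of 20 characters
+		 * points at the four-digit year used in the notice.
+		 */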
+		sprintf(buf, "Copyright (c) %4.4s  %s", ctime(&time_val) + 20, COPYRIGHT_NAME);
+		length = strlen(buf);
+		add_text(&image_corners, buf, length, image_corners.x + LEFT_BORDER + right_border - (length * font_width + 4),
+			 image_corners.y + TOP_BORDER + bottom_border - font_height - 4, font, font_width, font_height, BLACK, WHITE);
+	}
+
+
+	if (tick_flag != 0)  {
+		/*
+		 * Put some latitude/longitude tick marks on the edges of the image.
+		 *
+		 * The purpose of the 0.049999999999 is to round the latitude/longitude up to
+		 * the nearest tenth.  Since we put a tick mark every tenth of a degree,
+		 * we need to find the first round tenth above image_corners.sw_lat/image_corners.sw_long.
+		 */
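+		/*
+		 * For example, with an (assumed) southwest latitude of 43.57,
+		 * round((43.57 + 0.049999999999) * 10) gives 436, so the first
+		 * tick falls at 43.6 degrees.
+		 */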
+		i = (long)round((image_corners.sw_lat + 0.049999999999) * 10.0);
+		for (; i <= ((image_corners.ne_lat + 0.0000001) * 10.0); i++)  {
+			k = TOP_BORDER - 1 + image_corners.y - round((double)image_corners.y * ((double)i * 0.1 - image_corners.sw_lat) / (image_corners.ne_lat - image_corners.sw_lat));
+			if (((i % 10) == 0) || ((i % 10) == 5) || ((i % 10) == -5))  {
+				tick_width = 6;
+
+				sprintf(buf, "%.2f%c", fabs((double)i / 10.0), i < 0 ? 'S' : 'N');
+				length = strlen(buf);
+				add_text(&image_corners, buf, length, image_corners.x + LEFT_BORDER + 7, k - (font_height >> 1), font, font_width, font_height, BLACK, WHITE);
+				add_text(&image_corners, buf, length, LEFT_BORDER - 8 - font_width * length, k - (font_height >> 1), font, font_width, font_height, BLACK, WHITE);
+			}
+			else  {
+				tick_width = 4;
+			}
+
+			for (j = LEFT_BORDER - 1; j > (LEFT_BORDER - 1 - tick_width); j--)  {	/* Left side */
+				*(image_corners.ptr + k * x_prime + j) = BLACK;
+			}
+			for (j = image_corners.x + LEFT_BORDER; j < (image_corners.x + LEFT_BORDER + tick_width); j++)  {	/* Right side */
+				*(image_corners.ptr + k * x_prime + j) = BLACK;
+			}
+		}
+		i = (long)round((image_corners.sw_long + 0.049999999999) * 10.0);
+		for (; i <= ((image_corners.ne_long + 0.0000001) * 10.0); i++)  {
+			k = LEFT_BORDER - 1 + round((double)image_corners.x * ((double)i * 0.1 - image_corners.sw_long) / (image_corners.ne_long - image_corners.sw_long));
+
+			if (((i % 10) == 0) || ((i % 10) == 5) || ((i % 10) == -5))  {
+				if (((i % 10) == 0) || (res_x_image > ((double)font_width * 15.0)))  {
+					tick_width = 6;
+
+					sprintf(buf, "%.2f%c", fabs((double)i / 10.0), i < 0 ? 'W' : 'E');
+					length = strlen(buf);
+					add_text(&image_corners, buf, length, k - ((length * font_width) >> 1), image_corners.y + TOP_BORDER + 6, font, font_width, font_height, BLACK, WHITE);
+					add_text(&image_corners, buf, length, k - ((length * font_width) >> 1), TOP_BORDER - 7 - font_height, font, font_width, font_height, BLACK, WHITE);
+				}
+			}
+			else  {
+				tick_width = 4;
+			}
+
+			for (j = TOP_BORDER - 1; j > (TOP_BORDER - 1 - tick_width); j--)  {	/* Top */
+				*(image_corners.ptr + j * x_prime + k) = BLACK;
+			}
+			for (j = image_corners.y + TOP_BORDER; j < (image_corners.y + TOP_BORDER + tick_width); j++)  {	/* Bottom */
+				*(image_corners.ptr + j * x_prime + k) = BLACK;
+			}
+		}
+	}
+
+
+	/* Add some information at the top of the image, as an image label (if there is room). */
+	if (dem_name[0] != '\0')  {
+		sprintf(buf, "%s --- ", dem_name);
+	}
+	else  {
+		buf[0] = '\0';
+	}
+	sprintf(buf + strlen(buf), "%.5g%c, %.6g%c to %.5g%c, %.6g%c",
+		fabs(image_corners.sw_lat), image_corners.sw_lat < 0 ? 'S' : 'N',
+		fabs(image_corners.sw_long), image_corners.sw_long < 0 ? 'W' : 'E',
+		fabs(image_corners.ne_lat), image_corners.ne_lat < 0 ? 'S' : 'N',
+		fabs(image_corners.ne_long), image_corners.ne_long < 0 ? 'W' : 'E');
+	length = strlen(buf);
+	if ((length * font_width) <= (image_corners.x + LEFT_BORDER + right_border - 2))  {
+		add_text(&image_corners, buf, length, (image_corners.x >> 1) + LEFT_BORDER - 1 - ((length * font_width) >> 1),
+			 (TOP_BORDER >> 1) - 1 - (font_height >> 1) - font_height, font, font_width,
+			 font_height, BLACK, WHITE);
+
+		if ((max_elevation != -100000) && (min_elevation != 100000))  {
+			/*
+			 * If the max/min elevation data is valid, then indicate
+			 * the maximum/minimum elevation
+			 */
+			latitude1 = image_corners.sw_lat + (image_corners.ne_lat - image_corners.sw_lat) * (double)(image_corners.y - min_e_lat) / (double)image_corners.y;
+			longitude1 = image_corners.sw_long + (image_corners.ne_long - image_corners.sw_long) * (double)min_e_long / (double)image_corners.x;
+			latitude2 = image_corners.sw_lat + (image_corners.ne_lat - image_corners.sw_lat) * (double)(image_corners.y - max_e_lat) / (double)image_corners.y;
+			longitude2 = image_corners.sw_long + (image_corners.ne_long - image_corners.sw_long) * (double)max_e_long / (double)image_corners.x;
+			sprintf(buf, "Elevations: %dm (%dft) at %.5g%c %.6g%c, %dm (%dft) at %.5g%c %.6g%c",
+				min_elevation,
+				round((double)min_elevation * 3.28084),
+				fabs(latitude1), latitude1 < 0 ? 'S' : 'N',
+				fabs(longitude1), longitude1 < 0 ? 'W' : 'E',
+				max_elevation,
+				round((double)max_elevation * 3.28084),
+				fabs(latitude2), latitude2 < 0 ? 'S' : 'N',
+				fabs(longitude2), longitude2 < 0 ? 'W' : 'E');
+			length = strlen(buf);
+			if ((length * font_width) <= (image_corners.x + LEFT_BORDER + right_border - 2))  {
+				add_text(&image_corners, buf, length, (image_corners.x >> 1) + LEFT_BORDER - 1 - ((length * font_width) >> 1),
+					 (TOP_BORDER >> 1) - 1 - (font_height >> 1) + 2, font, font_width,
+					 font_height, BLACK, WHITE);
+			}
+		}
+	}
+
+
+	if (contour_flag == 0)  {
+		/* Add an elevation color chart at the bottom of the image, if there is room. */
+		if ((num_dem > 0) && ((image_corners.x + LEFT_BORDER + right_border - 2) >= COLOR_CHART_WIDTH) &&
+		    (bottom_border >= (30 + 3 * font_height)))  {
+			for (i = 0; i < COLOR_CHART_WIDTH; i++)  {
+				for (j = 0; j < 16; j++)  {
+					/*
+					 * To represent a given range of elevation, we draw a square of
+					 * a given color.  We pick one of the 16 possible colors for each elevation.
+					 * This is not perfect, but it at least gives the user some
+					 * clue as to how to decode the image.  I tried filling in
+					 * all 16 colors within each elevation square, but it didn't
+					 * look all that good.
+					 */
+					*(image_corners.ptr + (TOP_BORDER + image_corners.y + (bottom_border >> 1) - ((16 + 4 + font_height * 2) >> 1) + j) * x_prime +
+						LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + i) = (i & ~0xf) + 3;
+				}
+				if ((i & 0xf) == 0)  {
+					/* Add a tick mark */
+					*(image_corners.ptr + (TOP_BORDER + image_corners.y + (bottom_border >> 1) + 6 - font_height) * x_prime +
+						LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0)) = BLACK;
+					*(image_corners.ptr + (TOP_BORDER + image_corners.y + (bottom_border >> 1) + 7 - font_height) * x_prime +
+						LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0)) = BLACK;
+					*(image_corners.ptr + (TOP_BORDER + image_corners.y + (bottom_border >> 1) + 8 - font_height) * x_prime +
+						LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0)) = BLACK;
+					if (z_flag == 0)  {
+						/* Put a text label under the tick mark. */
+						sprintf(buf, "%d", (i >> 4));
+						length = strlen(buf);
+						add_text(&image_corners, buf, length, LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0) - ((font_width * length) >> 1),
+							TOP_BORDER + image_corners.y + (bottom_border >> 1) + 9 - font_height,
+							font, font_width, font_height, BLACK, WHITE);
+					}
+				}
+			}
+
+			/* Add a tick mark at the right end of the scale */
+			*(image_corners.ptr + (TOP_BORDER + image_corners.y + (bottom_border >> 1) + 6 - font_height) * x_prime +
+				LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0)) = BLACK;
+			*(image_corners.ptr + (TOP_BORDER + image_corners.y + (bottom_border >> 1) + 7 - font_height) * x_prime +
+				LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0)) = BLACK;
+			*(image_corners.ptr + (TOP_BORDER + image_corners.y + (bottom_border >> 1) + 8 - font_height) * x_prime +
+				LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0)) = BLACK;
+
+			if (z_flag == 0)  {
+				/* Put in an "infinity" sign by jamming two 'o' characters together. */
+				sprintf(buf, "o");
+				length = strlen(buf);
+				add_text(&image_corners, buf, length, LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0) - 1,
+					TOP_BORDER + image_corners.y + (bottom_border >> 1) + 9 - font_height,
+					font, font_width, font_height, BLACK, -2);
+				add_text(&image_corners, buf, length, LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (i & 0xf0) - ((font_width * length) >> 1) - 2,
+					TOP_BORDER + image_corners.y + (bottom_border >> 1) + 9 - font_height,
+					font, font_width, font_height, BLACK, -2);
+			}
+			else  {
+				/*
+				 * If z_flag is set, then we have altered the elevations in the color
+				 * map so that the entire color map gets used between min_elevation
+				 * and max_elevation.  In this case, we don't try to label every
+				 * tick mark.  We just label the two end tick marks with min_elevation
+				 * and max_elevation.
+				 */
+				i = min_elevation < 0 ? 0 : min_elevation;
+				sprintf(buf, "%-5.4g", (double)round((double)i * 3.28084) / 1000.0);
+				length = strlen(buf);
+				add_text(&image_corners, buf, length, LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) - (font_width >> 1),
+					TOP_BORDER + image_corners.y + (bottom_border >> 1) + 9 - font_height,
+					font, font_width, font_height, BLACK, WHITE);
+
+				sprintf(buf, "%5.4g", (double)round((double)max_elevation * 3.28084) / 1000.0);
+				length = strlen(buf);
+				add_text(&image_corners, buf, length, LEFT_BORDER + (image_corners.x >> 1) - (COLOR_CHART_WIDTH >> 1) + (COLOR_CHART_WIDTH & 0xf0) - (font_width >> 1) * ((length << 1) - 1),
+					TOP_BORDER + image_corners.y + (bottom_border >> 1) + 9 - font_height,
+					font, font_width, font_height, BLACK, WHITE);
+			}
+
+			/* Add a line to describe the units. */
+			sprintf(buf, "Thousands of feet.");
+			length = strlen(buf);
+			add_text(&image_corners, buf, length, (image_corners.x >> 1) + LEFT_BORDER - 1 - ((length * font_width) >> 1),
+				 TOP_BORDER + image_corners.y + (bottom_border >> 1) + 9, font, font_width,
+				 font_height, BLACK, WHITE);
+		}
+	}
+	else  {
+		/* Add a message about the contour interval at the bottom of the image, if there is room. */
+		if (num_dem > 0)  {
+			sprintf(buf, "Contour interval is %.2f meters (%.2f feet).", contour_intvl, contour_intvl * 3.28084);
+			length = strlen(buf);
+			if ((length * font_width) <= (image_corners.x + LEFT_BORDER + right_border - 2))  {
+				add_text(&image_corners, buf, length, (image_corners.x >> 1) + LEFT_BORDER - 1 - ((length * font_width) >> 1),
+					 TOP_BORDER + image_corners.y + (bottom_border >> 1) + 1 + (font_height >> 1), font, font_width,
+					 font_height, BLACK, WHITE);
+			}
+		}
+	}
+
+
+	/* Create the output file. */
+	if ((output_fdesc = open(output_file, O_WRONLY | O_CREAT | O_TRUNC, 0644)) < 0)  {
+		fprintf(stderr, "Can't create %s for writing, errno = %d\n", output_file, errno);
+		exit(0);
+	}
+
+
+	/* Initialize SUN rasterfile header. */
+	hdr.magic = MAGIC;
+	hdr.width = image_corners.x + LEFT_BORDER + right_border;
+	hdr.height = image_corners.y + TOP_BORDER + bottom_border;
+	hdr.depth = 8;
+	hdr.length = (image_corners.x + LEFT_BORDER + right_border) * (image_corners.y + TOP_BORDER + bottom_border);
+	hdr.type = STANDARD;
+	hdr.maptype = EQUAL_RGB;
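+	/* 768 bytes of color map:  256 entries times 3 bytes of RGB data. */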
+	hdr.maplength = 768;
+
+	/*
+	 * Write SUN rasterfile header and color map.
+	 * My X86 Linux machine (LITTLE_ENDIAN) requires some swabbing
+	 * (byte swapping) in the rasterfile header.
+	 * You may have a BIG_ENDIAN machine (which should require no
+	 * swabbing at all), a PDP_ENDIAN machine (which requires a
+	 * more complicated swabbing), or something else (with its
+	 * own form of swabbing).
+	 */
+	byte_order = swab_type();
+	if (byte_order == 0)  {
+		/* BIG_ENDIAN: Do nothing */
+	}
+	else if (byte_order == 1)  {
+		/* LITTLE_ENDIAN */
+		lsize = sizeof(struct rasterfile) / 4;
+		lptr = (long *)&hdr;
+		for (i = 0; i < lsize; i++)  {
+			LE_SWAB(lptr);
+			lptr++;
+		}
+	}
+	else if (byte_order == 2)  {
+		/* PDP_ENDIAN */
+		lsize = sizeof(struct rasterfile) / 4;
+		lptr = (long *)&hdr;
+		for (i = 0; i < lsize; i++)  {
+			PDP_SWAB(lptr);
+			lptr++;
+		}
+	}
+	else  {
+		/* Unknown */
+		fprintf(stderr, "Unknown machine type:  you will need to modify drawmap.c to do proper swabbing.\n");
+		exit(0);
+	}
+	write(output_fdesc, &hdr, sizeof(struct rasterfile));
+	write(output_fdesc, map, sizeof(map));
+
+
+	/* Output the image data. */
+	for (i = 0; i < (image_corners.y + TOP_BORDER + bottom_border); i++)  {
+		write(output_fdesc, image_corners.ptr + i * x_prime, image_corners.x + LEFT_BORDER + right_border);
+	}
+
+	free(image_corners.ptr);
+	close(output_fdesc);
+
+
+	/* For debugging. */
+/*	for (i = 0; i < 256; i++)  {
+ *		if (histogram[i] != 0)  {
+ *			fprintf(stderr, "histogram[%3d] = %d\n", i, histogram[i]);
+ *		}
+ *	}
+ */
+}
+
+
+
+
+/*
+ * Convert elevation gradient information into an index that
+ * can be used to select a color from the color table.
+ * This routine was largely developed by trial and error.
+ * There is no deep theory associated with the numeric values
+ * contained herein.
+ */
+long
+get_factor(double gradient)
+{
+	double angle, fraction;
+	long i;
+
+	/*
+	 * A table that works fairly well:
+	 *
+	 *	0.405,
+	 *	0.445,
+	 *	0.470,
+	 *	0.485,
+	 *	0.495,
+	 *	0.497,
+	 *	0.499,
+	 *	0.500,
+	 *	0.501,
+	 *	0.503,
+	 *	0.505,
+	 *	0.515,
+	 *	0.530,
+	 *	0.555,
+	 *	0.595,
+	 *
+	 * The table is duplicated in this comment so that we can
+	 * play with the actual table without losing track of a set of
+	 * values that work reasonably well.
+	 */
+	double table[15] =  {
+		0.405,
+		0.445,
+		0.470,
+		0.485,
+		0.495,
+		0.497,
+		0.499,
+		0.500,
+		0.501,
+		0.503,
+		0.505,
+		0.515,
+		0.530,
+		0.555,
+		0.595,
+	};
+
+	/* One possible way to create the table automatically. */
+//	for (i = 0; i < 15; i++)  {
+//		table[i] = table[0] + (table[14] - table[0]) * pow((table[i] - table[0]) / (table[14] - table[0]), 0.9);
+//	}
+
+	angle = atan(gradient) + (M_PI/2.0);
+	fraction = angle / (M_PI);
+//	angle_hist[round(fraction * 100000.0)]++;	/* For debugging. */
+//	total++;	/* For debugging. */
+
+	if (fraction > 1.0)  {
+		fprintf(stderr, "bad fraction in get_factor(%f):  %f\n", gradient, fraction);
+	}
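+	/*
+	 * Worked example (illustrative):  a level surface (gradient 0) yields
+	 * angle = pi/2 and fraction = 0.5, which is not below table[7] (0.500)
+	 * but is below table[8] (0.501), so the factor is 8.  A 45-degree upslope
+	 * (gradient 1) yields fraction 0.75, beyond table[14], so the factor
+	 * saturates at 15; a 45-degree downslope (gradient -1) yields fraction
+	 * 0.25 and factor 0.
+	 */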
+
+	/*
+	 * Return the index of the first table entry that the fraction falls
+	 * below; if it is beyond the end of the table, saturate at 15.
+	 */
+	for (i = 0; i < 15; i++)  {
+		if (fraction < table[i])  {
+			return(i);
+		}
+	}
+	return(15);
+}
+
+
+
+
+/*
+ * Write a text string into the image.
+ */
+void
+add_text(struct image_corners *image_corners, char *text_string, long text_string_length, long top_left_x,
+	 long top_left_y, unsigned char *font, long font_width, long font_height, long foreground, long background)
+{
+	long i, j, k;
+	long bit;
+
+	/*
+	 * Cycle through the font table for each given character in the text string.
+	 * Characters are represented as bit maps, with a 1 indicating part of the
+	 * character, and a 0 indicating part of the background.
+	 */
+	for (i = 0; i < text_string_length; i++)  {
+		for (j = 0; j < font_width; j++)  {
+			for (k = 0; k < font_height; k++)  {
+				bit = (*(font + k * 128 + *(text_string + i)) >> (font_width - 1 - j)) & 1;
+				if (bit != 0)  {
+					/* foreground */
+					*(image_corners->ptr + (top_left_y + k) * x_prime + top_left_x + i * font_width + j) = foreground;
+				}
+				else  {
+					/* background */
+					if (background < 0)  {
+						/*
+						 * If the background color map index is -1, then
+						 * we don't insert a specific background value, but rather
+						 * reduce the existing background in intensity.
+						 *
+						 * If the background color map index is any other negative
+						 * number, then we use a clear background.
+						 */
+						if (background == -1)  {
+							*(image_corners->ptr + (top_left_y + k) * x_prime + top_left_x + i * font_width + j) +=
+								(16 - (*(image_corners->ptr + (top_left_y + k) * x_prime + top_left_x + i * font_width + j) & 0xf)) >> 1;
+						}
+					}
+					else  {
+						*(image_corners->ptr + (top_left_y + k) * x_prime + top_left_x + i * font_width + j) = background;
+					}
+				}
+			}
+		}
+	}
+}
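+/*
+ * Illustrative call (hypothetical values):  to overlay the string "47.50N" in the
+ * 6x10 font at pixel position (x0, y0), fading the pixels behind it rather than
+ * erasing them, one could write:
+ *
+ *	add_text(&image_corners, "47.50N", 6, x0, y0, &font_6x10[0][0], 6, 10, BLACK, -1);
+ *
+ * A background of -1 reduces the intensity of the existing background under the
+ * text, any other negative value leaves the background untouched, and a
+ * non-negative value is written directly as the background color map index.
+ */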
+
+
+
+
+
+/*
+ * This routine prepares a storage array for DEM data.
+ * Since the code is fairly long, and appears several times
+ * in the program, it has been encapsulated here.
+ */
+void
+get_short_array(short **ptr, long x, long y)
+{
+	long i, j;
+
+	/*
+	 * Get memory for the DEM data.
+	 *
+	 * As all of the DEM files are read in, their data
+	 * eventually get combined into this storage area.
+	 * On the way, the data may get cropped, smoothed, or
+	 * subsampled.
+	 */
+	*ptr = (short *)malloc(sizeof(short) * (y + 1) * (x + 1));
+	if (*ptr == (short *)0)  {
+		fprintf(stderr, "malloc of *ptr failed\n");
+		exit(0);
+	}
+
+
+	/*
+	 * Before reading in the DEM data, initialize the entire image to
+	 * HIGHEST_ELEVATION, which will eventually be translated to the color WHITE.
+	 * This is because we don't know, in advance, which parts of the image will
+	 * be covered with data from the various DEM files.  The user does not have
+	 * to provide enough DEM files to fully tile the user-specified range of
+	 * latitude and longitude.
+	 */
+	for (i = 0; i <= y; i++)  {
+		for (j = 0; j <= x; j++)  {
+			*(*ptr + i * (x + 1) + j) = HIGHEST_ELEVATION;
+		}
+	}
+}
+
+
+
+
+
+
+/*
+ * This function produces a texture map, for use with the "povray"
+ * ray tracing package, that corresponds to the height-field map
+ * produced in response to the "-h" option.
+ *
+ * If you aren't familiar with ray tracing, and povray, this
+ * function probably won't mean much to you.  If you are
+ * familiar with povray, then the function's purpose should be
+ * fairly obvious.
+ */
+void
+gen_texture(long min_elevation, long max_elevation, struct color_tab *color_tab, char *output_file)
+{
+	FILE *texture_stream;
+	long i;
+	double inflection;
+
+	if ((texture_stream = fopen("drawmap.pov", "w+")) == (FILE *)0)  {
+		fprintf(stderr, "Can't create %s for writing, errno = %d\n", "drawmap.pov", errno); 
+		exit(0);
+	}
+
+
+	/*
+	 * Put some useful comments at the top of the file.
+	 */
+	fprintf(texture_stream, "// Povray (version 3) file, generated by drawmap.\n");
+	fprintf(texture_stream, "// Assuming that you have povray3 installed in the normal place,\n// this file should be render-able by typing:\n");
+	fprintf(texture_stream, "// x-povray +L/usr/local/lib/povray3/include +A +I drawmap.pov +O drawmap.tga +SP8 +EP1 +H600 +W600 +D11\n");
+	fprintf(texture_stream, "// The file will probably require manual editing to get things the way you want them.\n\n");
+	fprintf(texture_stream, "#include \"colors.inc\"\n\n");
+
+	/*
+	 * Generate a texture entry for sea-level, using bright blue from the color map.
+	 */
+	fprintf(texture_stream, "#declare TextureSea = texture { pigment { color rgb<%g, %g, %g> } finish { ambient 0.1 diffuse 0.4 brilliance 1.0 reflection 1.0 phong 1.0 phong_size 30.0 }}\n",
+		((double)brights[2].red) / 255.0, ((double)brights[2].green) / 255.0, ((double)brights[2].blue) / 255.0);
+	/*
+	 * Generate texture entries for other elevations, using whichever color map is currently in use.
+	 */
+	for (i = 0; i < MAX_VALID_BANDS; i++)  {
+		fprintf(texture_stream, "#declare Texture%d = texture { pigment { color rgb<%g, %g, %g> } finish { ambient 0.1 diffuse 0.4 brilliance 1.0 reflection 1.0 phong 1.0 phong_size 30.0 }}\n", i, 
+			(double)color_tab[i].red / 255.0, (double)color_tab[i].green / 255.0, (double)color_tab[i].blue / 255.0);
+	}
+
+	/*
+	 * Generate the main body of the file, including the texture map.
+	 */
+	fprintf(texture_stream, "camera{\n\tlocation <0.5, 15, -16>\n\tlook_at 0\n\tangle 30\n}\n\n");
+
+	fprintf(texture_stream, "light_source{ <-1000,1000,-1000> White }\n\n");
+
+	fprintf(texture_stream, "// height field generated for source data with elevations ranging from %d to %d.\n",
+				min_elevation, max_elevation);
+	fprintf(texture_stream, "// Points with negative elevations in the original data may have been set to zero.\n");
+	fprintf(texture_stream, "// Points with undefined elevations in the original data may have been set to zero.\n");
+	fprintf(texture_stream, "height_field {\n\tpgm \"%s\" water_level %g\n\tsmooth\n\ttexture {\n",
+				output_file, (double)min_elevation / (double)max_elevation);
+	fprintf(texture_stream, "\t\tgradient y\n");
+	fprintf(texture_stream, "\t\ttexture_map  {\n");
+
+	fprintf(texture_stream, "\t\t[ 0.0 TextureSea ]\n");
+	fprintf(texture_stream, "\t\t[ 0.000001 Texture0 ]\n");
+	for (i = 1; i < MAX_VALID_BANDS; i++)  {
+		inflection = (double)color_tab[i - 1].max_elevation / (double)max_elevation;
+		if (inflection > 1.0)  {
+			break;
+		}
+		fprintf(texture_stream, "\t\t[ %g Texture%d ]\n", inflection, i);
+	}
+
+	fprintf(texture_stream, "\t\t}\n\t}\n");
+	fprintf(texture_stream, "//\tThe middle scale factor in the \"scale\" line controls how much the terrain is stretched vertically.\n");
+	fprintf(texture_stream, "\ttranslate <-0.5, -0.5, -0.5>\n\tscale <10, 0.8, 10>\n}\n");
+
+	fclose(texture_stream);
+}

Added: packages/drawmap/branches/upstream/current/drawmap.h
===================================================================
--- packages/drawmap/branches/upstream/current/drawmap.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/drawmap.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,245 @@
+/*
+ * =========================================================================
+ * drawmap.h - A header file containing global information for all of drawmap.
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+
+
+/* #define COPYRIGHT_NAME	"Fred M. Erickson" */	/* Now defined in the Makefile */
+
+#define ONE_DEGREE_DEM_SIZE	1201
+
+#define BUF_SIZE	16384	// Generic buffer size --- should be large enough to never cause trouble
+#define MAX_GNIS_RECORD	1024	// Assumed by the code to be less than or equal to DEM_RECORD_LENGTH, defined in dem.h
+#define MAX_FILE_NAME	1000
+
+
+/* The borders should be at least 60, if possible.  They must be even numbers. */
+#define TOP_BORDER	60
+#define BOTTOM_BORDER	80
+#define LEFT_BORDER	60
+#define RIGHT_BORDER	60
+
+extern long bottom_border;
+extern long right_border;
+
+#define	NUM_DEM	1000	/* Number of DEM files allowed on input */
+
+#define HIGHEST_ELEVATION	32000	/* Elevation higher than any elevation expected in the DEM data */
+
+#define SMOOTH_MAX	10	/* maximum radius of smoothing kernel */
+
+#define OMIT_NEATLINES	1	/* If this is non-zero, then neatlines won't be drawn on the image. */
+
+
+/*
+ * These are SUN color map index values for shaded relief.  They are defined here
+ * because most drawing functions need access to them, and because users normally
+ * shouldn't change them.  User-changeable color information is defined
+ * in colors.h.
+ */
+#define	C_INDEX_0		0
+#define	C_INDEX_1		16
+#define	C_INDEX_2		32	// Really more like brown than orange
+#define	C_INDEX_3		48
+#define	C_INDEX_4		64
+#define	C_INDEX_5		80
+#define	C_INDEX_6		96
+#define	C_INDEX_7		112
+#define	C_INDEX_8		128
+#define	C_INDEX_9		144
+#define	C_INDEX_10		160
+#define	C_INDEX_11		176
+#define	C_INDEX_12		192
+#define	C_INDEX_13		208
+#define	C_INDEX_14		224	// Currently unused
+#define	C_INDEX_15		240
+#define COLOR_CHART_WIDTH	C_INDEX_14
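+/* The elevation color chart is thus 224 pixels wide:  14 bands of 16 pixels each. */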
+
+
+
+
+/*
+ * These are the SUN rasterfile color map index values for various primary
+ * colors.  They are included here since most drawing functions need them.
+ * The actual color definition is done in colors.h
+ */
+#define	B_RED		(C_INDEX_15)		// BRIGHT RED
+#define	B_GREEN		(C_INDEX_15 + 1)	// BRIGHT GREEN
+#define	B_BLUE		(C_INDEX_15 + 2)	// BRIGHT BLUE
+#define	BLACK		(C_INDEX_15 + 3)	// BLACK
+#define GRAY		(C_INDEX_15 + 4)	// GRAY
+#define L_ORANGE	(C_INDEX_15 + 5)	// LIGHT ORANGE
+#define WHITE		(C_INDEX_15 + 6)	// WHITE
+
+
+
+/*
+ * Structure used to pass datum parameters between functions.
+ */
+struct datum  {
+	double a;		// Semimajor ellipsoid radius (equatorial radius)
+	double b;		// Semiminor ellipsoid radius (polar radius)
+	double e_2;		// Eccentricity squared,  e^2 = 2*f - f*f = 1 - (b*b)/(a*a)
+	double f_inv;		// Inverse flattening,  1/f = a / (a - b)
+	double k0;		// scale factor along central meridian
+	double a0;		// First coefficient in Redfearn integral expansion
+	double a2;		// Second coefficient in Redfearn integral expansion
+	double a4;		// Third coefficient in Redfearn integral expansion
+	double a6;		// Fourth coefficient in Redfearn integral expansion
+};
+
+/*
+ * These are the parameters for the Clarke 1866 ellipsoid, which is used with the
+ * North American Datum of 1927 (NAD-27) datum.  The NAD-27 used a point on
+ * Meade Ranch in Kansas as its reference origin.
+ */
+#define NAD27_SEMIMAJOR (6378206.4)		// 1866 Clarke ellipsoid, equatorial radius in meters
+#define NAD27_SEMIMINOR (6356583.8)		// 1866 Clarke ellipsoid, polar radius in meters
+#define NAD27_E_SQUARED (0.006768658)		// 1866 Clarke ellipsoid, eccentricity squared	(e^2 = 2*f - f*f = 1 - b^2 / a^2)
+#define NAD27_F_INV (294.9786982)		// 1866 Clarke ellipsoid, inverse flattening	(1/f = a / (a - b))
+#define NAD27_A0 (0.99830568187775514389)	// First constant in meridian integral expansion:  1 - (e^2 / 4) - (3 * e^4 / 64) - (5 * e^6 / 256)
+#define NAD27_A2 (0.00254255550867060247)	// Second constant in meridian integral expansion:  (3/8) * (e^2 + e^4 / 4 + 15 e^6 / 128)
+#define NAD27_A4 (0.00000269808452963108)	// Third constant in meridian integral expansion:  (15 / 256) * (e^4 + 3 * e^6 / 4)
+#define NAD27_A6 (0.00000000353308874387)	// Fourth constant in meridian integral expansion:  35 * e^6 / 3072
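+/*
+ * Quick sanity check (illustrative):  from the inverse flattening above,
+ * f = 1 / 294.9786982 = 0.0033900753..., and 2*f - f*f = 0.0067686580...,
+ * which agrees with NAD27_E_SQUARED.
+ */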
+
+/*
+ * These are the parameters for the Geodetic Reference System (GRS) 1980 ellipsoid,
+ * which is used with the North American Datum of 1983 (NAD-83) datum.  This datum
+ * is based on a reference point at the center of the earth, and is defined based on
+ * satellite measurements.
+ *
+ * Other parameters:
+ *  Polar radius of curvature (c) 6399593.6259 m
+ *  angular velocity (w) 7292115e-11 radians/s
+ *  Geocentric gravitational constant (GM) 3986005e8 m^3/s^2
+ *  Flattening (f) 0.00335281068118
+ */
+#define NAD83_SEMIMAJOR (6378137.0)		// GRS80 ellipsoid, equatorial radius
+#define NAD83_SEMIMINOR (6356752.3141)		// GRS80 ellipsoid, polar radius
+#define NAD83_E_SQUARED (0.00669438002290)	// GRS80 ellipsoid, e*e	(e^2 = 2*f - f*f = 1 - b^2 / a^2)
+#define NAD83_F_INV (298.257222101)		// GRS80 ellipsoid, inverse flattening	(1/f = a / (a - b))
+#define NAD83_A0 (0.99832429844458494622)	// First constant in meridian integral expansion:  1 - (e^2 / 4) - (3 * e^4 / 64) - (5 * e^6 / 256)
+#define NAD83_A2 (0.00251460707284452333)	// Second constant in meridian integral expansion:  (3/8) * (e^2 + e^4 / 4 + 15 e^6 / 128)
+#define NAD83_A4 (0.00000263904662023027)	// Third constant in meridian integral expansion:  (15 / 256) * (e^4 + 3 * e^6 / 4)
+#define NAD83_A6 (0.00000000341804613677)	// Fourth constant in meridian integral expansion:  35 * e^6 / 3072
+
+/*
+ * These are the parameters for the World Geodetic System (WGS) 1984
+ * ellipsoid.
+ * (The WGS-84 ellipsoid is virtually identical to GRS-80.)
+ *
+ * Other parameters:
+ *  Polar radius of curvature (c) 6399593.6258 m
+ *  angular velocity (w) 7292115e-11 radians/s
+ *  Geocentric gravitational constant (GM) 3986005e8 m^3/s^2
+ *  Flattening (f) 0.00335281066474
+ */
+#define WGS84_SEMIMAJOR (6378137.0)		// WGS-84 ellipsoid, equatorial radius
+#define WGS84_SEMIMINOR (6356752.3142)		// WGS-84 ellipsoid, polar radius
+#define WGS84_E_SQUARED (0.00669437999013)	// WGS-84 ellipsoid, e*e	(e^2 = 2*f - f*f = 1 - b^2 / a^2)
+#define WGS84_F_INV (298.257223563)		// WGS-84 ellipsoid, inverse flattening	(1/f = a / (a - b))
+#define WGS84_A0 (0.99832429845279809866)	// First constant in meridian integral expansion:  1 - (e^2 / 4) - (3 * e^4 / 64) - (5 * e^6 / 256)
+#define WGS84_A2 (0.00251460706051444693)	// Second constant in meridian integral expansion:  (3/8) * (e^2 + e^4 / 4 + 15 e^6 / 128)
+#define WGS84_A4 (0.00000263904659432867)	// Third constant in meridian integral expansion:  (15 / 256) * (e^4 + 3 * e^6 / 4)
+#define WGS84_A6 (0.00000000341804608657)	// Fourth constant in meridian integral expansion:  35 * e^6 / 3072
+
+
+/*
+ * To convert to/from UTM coordinates we need to know the scale factor on the central meridian.
+ * For UTM, this is always 0.9996.
+ */
+#define UTM_K0 (0.9996)			// UTM Scale factor on the central meridian
+
+
+/*
+ * This structure is for passing information about a block of image data
+ * between routines.  It defines two opposite corners of the data
+ * block, in terms of latitude and longitude.  It also defines the
+ * x-by-y size of the block, in terms of number of data points.
+ */
+struct image_corners  {
+	unsigned char *ptr;	// A pointer to the block of memory containing the data
+
+	double sw_x_gp;		// lowest x UTM coordinate in data block (the _gp stands for ground planimetric coordinates)
+	double sw_y_gp;		// lowest y UTM coordinate in data block
+	long   sw_zone;		// UTM zone of southwest corner
+
+	double ne_x_gp;		// highest x UTM coordinate in data block
+	double ne_y_gp;		// highest y UTM coordinate in data block
+	long   ne_zone;		// UTM zone of northeast corner
+
+	double sw_lat;		// lowest latitude in data block
+	double sw_long;		// lowest longitude in data block
+	double ne_lat;		// highest latitude in data block
+	double ne_long;		// highest longitude in data block
+
+	long x;			// number of samples in a row
+	long y;			// number of samples in a column
+};
+
+
+
+long round(double);
+long max(long, long);
+double max3(double, double, double);
+double min3(double, double, double);
+ssize_t buf_read(int, void *, size_t);
+ssize_t buf_write(int, const void *, size_t);
+ssize_t get_a_line(int, void *, size_t);
+ssize_t buf_read_z(int, void *, size_t);
+ssize_t get_a_line_z(int, void *, size_t);
+double lat_conv(unsigned char *);
+double lon_conv(unsigned char *);
+double find_latitude(double, double);
+double find_longitude(double, double);
+long redfearn(struct datum *, double *, double *, long *, double, double, long);
+long redfearn_inverse(struct datum *, double, double, long, double *, double *);
+void decimal_degrees_to_dms(double, long *, long *, double *);
+long swab_type();
+
+/*
+ * Some macros to do swabbing.
+ * LE_SWAB() does Little-Endian swabbing.
+ * PDP_SWAB() does PDP-Endian swabbing.
+ *
+ * The argument "num" is a pointer to a long word to be swabbed.
+ */
+#define LE_SWAB(num)  { \
+	unsigned char a, b, c, d; \
+	a = ((*(num)) >> 24) & 0xff; \
+	b = ((*(num)) >> 16) & 0xff; \
+	c = ((*(num)) >> 8) & 0xff; \
+	d = (*(num)) & 0xff; \
+	*(num) = d << 8; \
+	*(num) = (*(num) | c) << 8; \
+	*(num) = (*(num) | b) << 8; \
+	*(num) = *(num) | a; \
+}
+#define PDP_SWAB(num)  { \
+	unsigned char a, b, c, d; \
+	a = ((*(num)) >> 24) & 0xff; \
+	b = ((*(num)) >> 16) & 0xff; \
+	c = ((*(num)) >> 8) & 0xff; \
+	d = (*(num)) & 0xff; \
+	*(num) = b << 8; \
+	*(num) = (*(num) | a) << 8; \
+	*(num) = (*(num) | d) << 8; \
+	*(num) = *(num) | c; \
+}
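+
+/*
+ * Illustrative example (assuming a 32-bit long, as the macros above do):
+ * given  long x = 0x11223344;
+ *   LE_SWAB(&x)   leaves x == 0x44332211   (full byte reversal), while
+ *   PDP_SWAB(&x)  leaves x == 0x22114433   (bytes swapped within each 16-bit half).
+ */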

Added: packages/drawmap/branches/upstream/current/font_5x8.h
===================================================================
--- packages/drawmap/branches/upstream/current/font_5x8.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/font_5x8.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,30 @@
+/*
+ * =========================================================================
+ * font_5x8.h - A definition for a 5 by 8 bitmapped font.
+ * Copyright (c) 1997  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+unsigned char font_5x8[8][128]  =  {
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xa, 0x4, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x0, 0xc, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0xa, 0xa, 0xe, 0x8, 0xa, 0x6, 0x2, 0x8, 0x9, 0x4, 0x0, 0x0, 0x0, 0x1, 0x4, 0x4, 0x6, 0x6, 0x2, 0xf, 0x6, 0xf, 0x6, 0x6, 0x6, 0x6, 0x2, 0x0, 0x8, 0x4, 0x9, 0x6, 0xe, 0x6, 0xe, 0xf, 0xf, 0x6, 0x9, 0xe, 0x7, 0x9, 0x8, 0x9, 0x9, 0x6, 0xe, 0x6, 0xe, 0x6, 0x1f, 0x9, 0x9, 0x9, 0x9, 0x11, 0xf, 0xe, 0x8, 0xe, 0x4, 0x0, 0xc, 0x0, 0x8, 0x0, 0x1, 0x0, 0x2, 0x0, 0x8, 0x4, 0x2, 0x8, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x4, 0x2, 0x5, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0xa, 0x1f, 0x14, 0xa, 0xa, 0x4, 0x4, 0x4, 0x6, 0x4, 0x0, 0x0, 0x0, 0x1, 0xa, 0xc, 0x9, 0x9, 0x6, 0x8, 0x8, 0x1, 0x9, 0x9, 0x6, 0x6, 0x4, 0xe, 0x4, 0xa, 0x13, 0x9, 0x9, 0x9, 0x9, 0x8, 0x8, 0x9, 0x9, 0x4, 0x1, 0xa, 0x8, 0xf, 0xd, 0x9, 0x9, 0x9, 0x9, 0x9, 0x4, 0x9, 0x9, 0x9, 0x9, 0x11, 0x1, 0x8, 0x8, 0x2, 0xa, 0x0, 0x8, 0x0, 0x8, 0x0, 0x1, 0x0, 0x5, 0x0, 0x8, 0x0, 0x0, 0x8, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x2, 0x4, 0x4, 0xa, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0xa, 0xa, 0xe, 0x4, 0x4, 0x8, 0x4, 0x4, 0xf, 0x1f, 0x0, 0x0, 0x0, 0x2, 0xa, 0x4, 0x1, 0x2, 0xa, 0xe, 0xa, 0x2, 0x6, 0xb, 0x0, 0x0, 0x8, 0x0, 0x2, 0x2, 0x15, 0x9, 0xe, 0x8, 0x9, 0xe, 0xe, 0x8, 0xf, 0x4, 0x1, 0xc, 0x8, 0xf, 0xf, 0x9, 0x9, 0x9, 0x9, 0x4, 0x4, 0x9, 0x9, 0x9, 0x6, 0xa, 0x2, 0x8, 0x4, 0x2, 0xa, 0x0, 0x4, 0x5, 0xe, 0x6, 0x5, 0x6, 0x4, 0x6, 0xe, 0xc, 0x2, 0x9, 0x4, 0xa, 0xe, 0x6, 0xe, 0x7, 0xa, 0xe, 0xe, 0x9, 0xa, 0x11, 0x9, 0x9, 0xf, 0xc, 0x4, 0x3, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x1f, 0x5, 0xa, 0xa, 0x0, 0x4, 0x4, 0x6, 0x4, 0x6, 0xf, 0x4, 0x4, 0xa, 0x4, 0x6, 0x1, 0xf, 0x1, 0xd, 0x2, 0x9, 0x5, 0x6, 0x6, 0x8, 0xe, 0x2, 0x4, 0x15, 0xf, 0x9, 0x8, 0x9, 0x8, 0x8, 0xb, 0x9, 0x4, 0x1, 0xa, 0x8, 0x9, 0xb, 0x9, 0xe, 0xd, 0xe, 0x2, 0x4, 0x9, 0x9, 0xf, 0x6, 0x4, 0x4, 0x8, 0x2, 0x2, 0x0, 0x0, 0x0, 0xb, 0x9, 0x8, 0xb, 0xf, 0xe, 0x9, 0x9, 0x4, 0x2, 0xe, 0x4, 0x15, 0x9, 0x9, 0x9, 0x9, 0xd, 0xc, 0x4, 0x9, 0xa, 0x15, 0x6, 0x9, 0x2, 0x2, 0x4, 0x4, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xa, 0xe, 0x2, 0xa, 0x0, 0x2, 0x8, 0x9, 0x4, 0x4, 0x0, 0xe, 0x8, 0xa, 0x4, 0x8, 0x9, 0x2, 0x9, 0x9, 0x4, 0x9, 0x1, 0x6, 0x4, 0x4, 0x0, 0x4, 0x0, 0x12, 0x9, 0x9, 0x9, 0x9, 0x8, 0x8, 0x9, 0x9, 0x4, 0x9, 0xa, 0x8, 0x9, 0xb, 0x9, 0x8, 0xb, 0xb, 0x9, 0x4, 0x9, 0x6, 0xf, 0x9, 0x4, 0x8, 0x8, 0x1, 0x2, 0x0, 0x0, 0x0, 0xb, 0x9, 0x8, 0xb, 0x8, 0x4, 0x7, 0x9, 0x4, 0x2, 0x9, 0x4, 0x15, 0x9, 0x9, 0xe, 0x7, 0x8, 0x2, 0x5, 0x9, 0xa, 0x15, 0x6, 0x7, 0x4, 0x4, 0x4, 0x2, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0xa, 0x4, 0x0, 0x5, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8, 0x0, 0x4, 0x8, 0x4, 0xe, 0xf, 0x6, 0x2, 0x6, 0x6, 0x4, 0x6, 0x6, 0x0, 0x8, 0x2, 0x0, 0x8, 0x4, 0x8, 0x9, 0xe, 0x6, 0xe, 0xf, 0x8, 0x6, 0x9, 0xe, 0x6, 0x9, 0xe, 0x9, 0x9, 0x6, 0x8, 0x6, 0x9, 0x6, 0x4, 0x6, 0x6, 0x9, 0x9, 0x4, 0xf, 0xe, 0x1, 0xe, 0x0, 0x0, 0x0, 0x5, 0xe, 0x6, 0x5, 0x6, 0x4, 0x1, 0x9, 0xe, 0xa, 0x9, 0xe, 0x11, 0x9, 0x6, 0x8, 0x1, 0x8, 0xe, 0x2, 0x7, 0x4, 0xe, 0x9, 0x9, 0xf, 0x3, 0x4, 0xc, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xf, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x6, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+};

Added: packages/drawmap/branches/upstream/current/font_6x10.h
===================================================================
--- packages/drawmap/branches/upstream/current/font_6x10.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/font_6x10.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,32 @@
+/*
+ * =========================================================================
+ * font_6x10.h - A definition for a 6 by 10 bitmapped font.
+ * Copyright (c) 1997  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+unsigned char font_6x10[10][128]  = {
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0xa, 0xa, 0x4, 0x9, 0x8, 0x6, 0x2, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x4, 0x4, 0xe, 0x1f, 0x2, 0x1f, 0x6, 0x1f, 0xe, 0xe, 0x0, 0x0, 0x1, 0x0, 0x10, 0xe, 0xe, 0x4, 0x1e, 0xe, 0x1e, 0x1f, 0x1f, 0xe, 0x11, 0xe, 0x7, 0x11, 0x10, 0x11, 0x11, 0xe, 0x1e, 0xe, 0x1e, 0xe, 0x1f, 0x11, 0x11, 0x11, 0x11, 0x11, 0x1f, 0xe, 0x10, 0xe, 0x4, 0x0, 0xc, 0x0, 0x10, 0x0, 0x1, 0x0, 0x6, 0x0, 0x10, 0x4, 0x2, 0x10, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0x4, 0x18, 0x9, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0xa, 0xa, 0xe, 0x15, 0x14, 0x4, 0x4, 0x4, 0x11, 0x4, 0x0, 0x0, 0x0, 0x1, 0xa, 0xc, 0x11, 0x1, 0x6, 0x10, 0x8, 0x1, 0x11, 0x11, 0x4, 0x4, 0x2, 0x0, 0x8, 0x11, 0x11, 0xa, 0x9, 0x11, 0x9, 0x10, 0x10, 0x11, 0x11, 0x4, 0x1, 0x12, 0x10, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x4, 0x11, 0x11, 0x11, 0x11, 0x11, 0x1, 0x8, 0x10, 0x2, 0xa, 0x0, 0x4, 0x0, 0x10, 0x0, 0x1, 0x0, 0x9, 0x0, 0x10, 0x0, 0x0, 0x10, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x4, 0x4, 0x15, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0xa, 0x1f, 0x14, 0xa, 0x14, 0x8, 0x8, 0x2, 0xa, 0x4, 0x0, 0x0, 0x0, 0x2, 0x11, 0x14, 0x1, 0x2, 0xa, 0x16, 0x10, 0x2, 0x11, 0x13, 0xe, 0xe, 0x4, 0x1f, 0x4, 0x2, 0x13, 0x11, 0x9, 0x10, 0x9, 0x10, 0x10, 0x10, 0x11, 0x4, 0x1, 0x14, 0x10, 0x1b, 0x19, 0x11, 0x11, 0x11, 0x11, 0x10, 0x4, 0x11, 0x11, 0x11, 0xa, 0xa, 0x2, 0x8, 0x8, 0x2, 0x11, 0x0, 0x2, 0xe, 0x16, 0xe, 0xd, 0xe, 0x8, 0xd, 0x16, 0xc, 0x6, 0x11, 0x4, 0x1a, 0x16, 0xe, 0x16, 0xd, 0x16, 0xe, 0x1e, 0x11, 0x11, 0x11, 0x11, 0x11, 0x1f, 0x2, 0x4, 0x8, 0x12, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0xa, 0xe, 0x4, 0x8, 0x0, 0x8, 0x2, 0x1f, 0x1f, 0x0, 0x1f, 0x0, 0x4, 0x11, 0x4, 0x6, 0x6, 0x12, 0x19, 0x16, 0x2, 0xe, 0xd, 0x4, 0x4, 0x8, 0x0, 0x2, 0x4, 0x15, 0x11, 0xe, 0x10, 0x9, 0x1e, 0x1e, 0x10, 0x1f, 0x4, 0x1, 0x18, 0x10, 0x15, 0x15, 0x11, 0x1e, 0x11, 0x1e, 0xe, 0x4, 0x11, 0xa, 0x15, 0x4, 0x4, 0x4, 0x8, 0x4, 0x2, 0x0, 0x0, 0x0, 0x1, 0x19, 0x11, 0x13, 0x11, 0x1e, 0x12, 0x19, 0x4, 0x2, 0x12, 0x4, 0x15, 0x19, 0x11, 0x19, 0x13, 0x19, 0x10, 0x8, 0x11, 0x11, 0x11, 0xa, 0x11, 0x2, 0xc, 0x4, 0x6, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0x1f, 0x5, 0xa, 0x15, 0x0, 0x8, 0x2, 0xa, 0x4, 0x0, 0x0, 0x0, 0x8, 0x11, 0x4, 0x8, 0x1, 0x1f, 0x1, 0x19, 0x4, 0x11, 0x1, 0x0, 0x0, 0x4, 0x1f, 0x4, 0x4, 0x16, 0x1f, 0x9, 0x10, 0x9, 0x10, 0x10, 0x13, 0x11, 0x4, 0x1, 0x14, 0x10, 0x11, 0x13, 0x11, 0x10, 0x11, 0x14, 0x1, 0x4, 0x11, 0xa, 0x15, 0xa, 0x4, 0x8, 0x8, 0x2, 0x2, 0x0, 0x0, 0x0, 0xf, 0x11, 0x10, 0x11, 0x1f, 0x8, 0xc, 0x11, 0x4, 0x2, 0x1c, 0x4, 0x15, 0x11, 0x11, 0x19, 0x13, 0x10, 0xe, 0x8, 0x11, 0xa, 0x15, 0x4, 0x13, 0x4, 0x2, 0x4, 0x8, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xa, 0xe, 0x15, 0x12, 0x0, 0x4, 0x4, 0x11, 0x4, 0x6, 0x0, 0x4, 0x10, 0xa, 0x4, 0x10, 0x11, 0x2, 0x11, 0x11, 0x8, 0x11, 0x2, 0x4, 0x6, 0x2, 0x0, 0x8, 0x0, 0x10, 0x11, 0x9, 0x11, 0x9, 0x10, 0x10, 0x11, 0x11, 0x4, 0x11, 0x12, 0x10, 0x11, 0x11, 0x11, 0x10, 0x15, 0x12, 0x11, 0x4, 0x11, 0xa, 0x1b, 0x11, 0x4, 0x10, 0x8, 0x1, 0x2, 0x0, 0x0, 0x0, 0x11, 0x19, 0x11, 0x13, 0x10, 0x8, 0x10, 0x11, 0x4, 0x2, 0x12, 0x4, 0x15, 0x11, 0x11, 0x16, 0xd, 0x10, 0x1, 0x9, 0x13, 0xa, 0x15, 0xa, 0xd, 0x8, 0x4, 0x4, 0x4, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 0xa, 0x4, 0x12, 0xd, 0x0, 0x2, 0x8, 0x0, 0x0, 0x4, 0x0, 0xe, 0x10, 0x4, 0x1f, 0x1f, 0xe, 0x2, 0xe, 0xe, 0x8, 0xe, 0xc, 0xe, 0x4, 0x1, 0x0, 0x10, 0x4, 0xe, 0x11, 0x1e, 0xe, 0x1e, 0x1f, 0x10, 0xe, 0x11, 0xe, 0xe, 0x11, 0x1f, 0x11, 0x11, 0xe, 0x10, 0xe, 0x11, 0xe, 0x4, 0xe, 0x4, 0x11, 0x11, 0x4, 0x1f, 0xe, 0x1, 0xe, 0x0, 0x0, 0x0, 0xf, 0x16, 0xe, 0xd, 0xe, 0x8, 0xe, 0x11, 0xe, 0x12, 0x11, 0xe, 0x11, 0x11, 0xe, 0x10, 0x1, 0x10, 0x1e, 0x6, 0xd, 0x4, 0xa, 0x11, 0x1, 0x1f, 0x3, 0x4, 0x18, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8, 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x4, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x1f, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x11, 0x0, 0x0, 0x12, 0x0, 0x0, 0x0, 0x0, 0x0, 0x10, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x11, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+  0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0, 0x0, 0x0, 0x10, 0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+};

Added: packages/drawmap/branches/upstream/current/gtopo30.c
===================================================================
--- packages/drawmap/branches/upstream/current/gtopo30.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/gtopo30.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,911 @@
+/*
+ * =========================================================================
+ * gtopo30.c - Routines to handle GTOPO30 data.
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <math.h>
+#include <errno.h>
+#include <string.h>
+#include "drawmap.h"
+#include "dem.h"
+
+
+
+int parse_gtopo30_hdr(char *, struct dem_corners *, struct dem_record_type_a *, struct datum *, long *, long *, long *);
+
+
+
+
+/*
+ * Process a GTOPO30 file.  These files use the Geographic Planimetric
+ * Reference System and have samples spaced by 30 arc-seconds.
+ *
+ * This function returns 0 if it allocates memory and reads in the data.
+ * It returns 1 if it doesn't allocate memory.
+ */
+int
+process_gtopo30(char *file_name, struct image_corners *image_corners,
+		struct dem_corners *dem_corners, struct dem_record_type_a *dem_a, struct datum *dem_datum, long info_flag)
+{
+	long i, j, k, l;
+	long j_size;
+	unsigned char *ptr;
+	unsigned char buf[8 * DEM_RECORD_LENGTH];
+	ssize_t ret_val;
+	int interp_size;
+	long dem_size_x, dem_size_y;
+	long nbytes;
+	long nodata;
+	double lat_low, lat_high, long_low, long_high;
+	long i_low, j_low;
+	long i_high, j_high;
+	long min_elev = 100000, max_elev = -100000;	/* sentinels, so the running min/max comparisons below always update on the first sample */
+	long length;
+	int fdesc_in;
+	long gz_flag;
+	ssize_t (*read_function)();
+	char *unswabbed;
+	short *swabbed;
+	long byte_order;
+	long upper_case_flag;
+	char tmp_file_name[MAX_FILE_NAME];
+	double lat_tmp;
+
+
+
+	/*
+	 * find the native byte order of this machine.
+	 */
+	byte_order = swab_type();
+
+
+	/*
+	 * Parse the GTOPO30 HDR file.
+	 */
+	if (parse_gtopo30_hdr(file_name, dem_corners, dem_a, dem_datum, &nbytes, &nodata, &gz_flag) != 0)  {
+		/* If there was a failure, the error message was printed by parse_gtopo30_hdr(). */
+		return 1;
+	}
+
+
+	/*
+	 * If the DEM data don't overlap the image, then ignore them.
+	 *
+	 * If the user didn't specify latitude/longitude ranges for the image,
+	 * then we simply use this DEM to determine those boundaries.  In this
+	 * latter case, no overlap check is necessary (or possible) since the
+	 * image boundaries will be determined later.
+	 *
+	 * The GTOPO30 data generally cover very large areas.
+	 * Because of this, we don't pass back an array of the
+	 * entire GTOPO30 elevation data.  Rather, we only pass
+	 * back the portion that falls inside the image boundaries.
+	 * If there were no image boundaries specified, then we
+	 * pass back the whole gigantic thing.
+	 */
+	if ((info_flag == 0) && (image_corners->sw_lat < image_corners->ne_lat))  {
+		/* The user has specified image boundaries.  Check for overlap. */
+		if ((dem_corners->sw_lat >= image_corners->ne_lat) || ((dem_corners->nw_lat) <= image_corners->sw_lat) ||
+		    (dem_corners->sw_long >= image_corners->ne_long) || ((dem_corners->se_long) <= image_corners->sw_long))  {
+			return 1;
+		}
+
+		if (dem_corners->sw_lat < image_corners->sw_lat)  {
+			lat_low = image_corners->sw_lat;
+		}
+		else  {
+			lat_low = dem_corners->sw_lat;
+		}
+		if (dem_corners->nw_lat > image_corners->ne_lat)  {
+			lat_high = image_corners->ne_lat;
+		}
+		else  {
+			lat_high = dem_corners->nw_lat;
+		}
+		if (dem_corners->sw_long < image_corners->sw_long)  {
+			long_low = image_corners->sw_long;
+		}
+		else  {
+			long_low = dem_corners->sw_long;
+		}
+		if (dem_corners->ne_long > image_corners->ne_long)  {
+			long_high = image_corners->ne_long;
+		}
+		else  {
+			long_high = dem_corners->ne_long;
+		}
+	}
+	else  {
+		lat_low = dem_corners->sw_lat;
+		lat_high = dem_corners->nw_lat;
+		long_low = dem_corners->sw_long;
+		long_high = dem_corners->ne_long;
+	}
+
+
+	/*
+	 * Make a copy of the file name.  The one we were originally
+	 * given is still stored in the command line arguments.
+	 * It is probably a good idea not to alter those, lest we
+	 * scribble something we don't want to scribble.
+	 */
+	strncpy(tmp_file_name, file_name, MAX_FILE_NAME - 1);
+	tmp_file_name[MAX_FILE_NAME - 1] = '\0';
+	if ((length = strlen(tmp_file_name)) < 5)  {
+		/*
+		 * Excluding the initial path, the file name should have the form
+		 * *.HDR, perhaps with a ".gz" on the end.  If it isn't
+		 * at least long enough to have this form, then reject it.
+		 */
+		fprintf(stderr, "File name %s doesn't look right.\n", tmp_file_name);
+		return 1;
+	}
+	/* Check the case of the characters in the file name by examining a single character. */
+	if (gz_flag == 0)  {
+		if (tmp_file_name[length - 1] == 'r')  {
+			upper_case_flag = 0;
+		}
+		else  {
+			upper_case_flag = 1;
+		}
+	}
+	else  {
+		if (tmp_file_name[length - 4] == 'r')  {
+			upper_case_flag = 0;
+		}
+		else  {
+			upper_case_flag = 1;
+		}
+	}
+
+
+	/*
+	 * We need to modify the file name from *.HDR to *.DEM.
+	 */
+	if (upper_case_flag == 0)  {
+		if (gz_flag != 0)  {
+			strncpy(&tmp_file_name[length - 6], "dem", 3);
+		}
+		else  {
+			strncpy(&tmp_file_name[length - 3], "dem", 3);
+		}
+	}
+	else  {
+		if (gz_flag != 0)  {
+			strncpy(&tmp_file_name[length - 6], "DEM", 3);
+		}
+		else  {
+			strncpy(&tmp_file_name[length - 3], "DEM", 3);
+		}
+	}
+
+	/*
+	 * Open DEM file.
+	 */
+	length = strlen(tmp_file_name);
+	if ((strcmp(&tmp_file_name[length - 3], ".gz") == 0) || (strcmp(&tmp_file_name[length - 3], ".GZ") == 0))  {
+		gz_flag = 1;
+		if ((fdesc_in = buf_open_z(tmp_file_name, O_RDONLY)) < 0)  {
+			fprintf(stderr, "Can't open %s for reading, errno = %d\n", tmp_file_name, errno);
+			exit(0);
+		}
+		read_function = buf_read_z;
+	}
+	else  {
+		gz_flag = 0;
+		if ((fdesc_in = buf_open(tmp_file_name, O_RDONLY)) < 0)  {
+			fprintf(stderr, "Can't open %s for reading, errno = %d\n", tmp_file_name, errno);
+			exit(0);
+		}
+		read_function = buf_read;
+	}
+
+
+	/*
+	 * Now we need to read in the GTOPO30 data, and build an
+	 * array of elevation samples to return to the caller.
+	 *
+	 * First we need some space to read in the GTOPO30 DEM elevations,
+	 * and a buffer to put the data after getting the byte-order correct.
+	 */
+	unswabbed = (char *)malloc(nbytes * dem_corners->x);
+	if (unswabbed == (char *)0)  {
+		fprintf(stderr, "malloc of unswabbed failed\n");
+		exit(0);
+	}
+	swabbed = (short *)malloc(nbytes * dem_corners->x);
+	if (swabbed == (short *)0)  {
+		fprintf(stderr, "malloc of swabbed failed\n");
+		exit(0);
+	}
+
+	/*
+	 * Now we read in a row of data, get it into the correct byte order,
+	 * and stuff it into the array.
+	 *
+	 * i_low and i_high give us the lowest and (highest+1) "i" values in
+	 * the GTOPO30 source array that get copied into our output block.
+	 * Ditto for j_low and j_high.
+	 *
+	 * In the dem_corners->ptr array, the first index runs from the highest
+	 * latitude for index=0 to the lowest latitude for index=?.  The second
+	 * index runs from the lowest longitude for index=0 to the highest
+	 * longitude for index=?.  Remember that both longitudes and latitudes
+	 * are signed, so that 111W is less than 110W, and 45S is less than 44S.
+	 *
+	 * We fill the array with an extra sample in each direction, just like
+	 * the 1-degree DEMs, which have 1201 by 1201 samples.
+	 *
+	 * Note that the first sample, in the northwest corner, is offset inside
+	 * the latitude/longitude boundaries by half of one sample width.  This
+	 * is because they produced the data by combining a bunch of surrounding
+	 * samples and then put the data point smack dab in the middle of the sample
+	 * cell.  We ignore this, and just move each sample to the northwest corner
+	 * of the sample cell.  (Actually, this movement was done for us during our
+	 * call to parse_gtopo30_hdr().)  We could do some kind of fancy interpolation
+	 * to get a more "correct" value for the corner of the sample cell; or we
+	 * could change the latitude/longitude corners of the data to correspond
+	 * exactly to the locations of the samples.  However, neither of these
+	 * things seems worthwhile for the relatively low resolution GTOPO30
+	 * data.
+	 */
+	i_low = round((double)dem_corners->y * (dem_corners->nw_lat - lat_high) / (dem_corners->nw_lat - dem_corners->se_lat));
+	i_high = round((double)dem_corners->y * (dem_corners->nw_lat - lat_low) / (dem_corners->nw_lat - dem_corners->se_lat));
+	j_low = round((double)dem_corners->x * (long_low - dem_corners->nw_long) / (dem_corners->se_long - dem_corners->nw_long));
+	j_high = round((double)dem_corners->x * (long_high - dem_corners->nw_long) / (dem_corners->se_long - dem_corners->nw_long));
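+	/*
+	 * Worked example (illustrative):  for a GTOPO30 tile covering 50 degrees of
+	 * latitude in 6000 rows (nw_lat 90, se_lat 40), a region of interest from
+	 * lat_low 48 to lat_high 49 gives
+	 *   i_low  = round(6000 * (90 - 49) / 50) = 4920
+	 *   i_high = round(6000 * (90 - 48) / 50) = 5040
+	 * so roughly rows 4920 through 5040 of the source tile fall within the region.
+	 */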
+
+	/*
+	 * malloc the space for the data array we will pass back.
+	 */
+	j_size = j_high - j_low + 1;
+	dem_corners->ptr = (short *)malloc(nbytes * (i_high - i_low + 1) * j_size);
+	if (dem_corners->ptr == (short *)0)  {
+		fprintf(stderr, "malloc of dem_corners->ptr failed\n");
+		exit(0);
+	}
+
+	for (i = 0; i < dem_corners->y; i++) {
+		/*
+		 * Read in the data, and convert it into an array of properly-byte-ordered
+		 * short integers.
+		 */
+		if ((ret_val = read_function(fdesc_in, unswabbed, nbytes * dem_corners->x)) != (nbytes * dem_corners->x))  {
+			fprintf(stderr, "Read failure on DEM file.  ret_val = %d\n", ret_val);
+			exit(0);
+		}
+		if (i < i_low)  {
+			continue;
+		}
+		for (j = 0; j < dem_corners->x; j++)  {
+			if (nbytes == 1)  {
+				swabbed[j] = 0x00ff & (short)unswabbed[j];
+			}
+			else  {
+				swabbed[j] = 0x00ff & (short)unswabbed[j];
+				if (byte_order == 0)  {
+					swabbed[j] = (((short)unswabbed[(j << 1) + 1] << 8) & 0xff00) + ((short)unswabbed[j << 1] & 0x00ff);
+				}
+				else  {
+					swabbed[j] = (((short)unswabbed[j << 1] << 8) & 0xff00) + ((short)unswabbed[(j << 1) + 1] & 0x00ff);
+				}
+				/*
+				 * Sub-sea-level areas may be filled with a flag number instead of
+				 * elevations.  If so, then set the elevation to zero.
+				 */
+				if (swabbed[j] == nodata)  {
+					swabbed[j] = 0;
+				}
+			}
+		}
+
+		/*
+		 * transfer the data into the dem_corners->ptr array.
+		 */
+		if (i <= (i_high - 1))  {
+			for (j = j_low; j < j_high; j++)  {
+
+// Debugging code
+//if (((i - i_low) < 0) || ((i - i_low) > (i_high - i_low - 1)))  {
+//fprintf(stderr, "i index out: %d       i=%d  j=%d i_low=%d i_high=%d j_low=%d j_high=%d k=%d l=%d\n", i - i_low, i, j, i_low, i_high, j_low, j_high, k, l);
+//}
+//if (((j - j_low) < 0) || ((j - j_low) > (j_high - j_low - 1)))  {
+//fprintf(stderr, "j index out: %d       i=%d  j=%d i_low=%d i_high=%d j_low=%d j_high=%d k=%d l=%d\n", j - j_low, i, j, i_low, i_high, j_low, j_high, k, l);
+//}
+//if (((i - i_low) == 0) || ((i - i_low) == (i_high - i_low - 1)))  {
+//fprintf(stderr, "i_index %d touches boundary\n", i - i_low);
+//}
+//if (((j - j_low) == 0) || ((j - j_low) == (j_high - j_low - 1)))  {
+//fprintf(stderr, "j_index %d touches boundary\n", j - j_low);
+//}
+
+				*(dem_corners->ptr + j_size * (i - i_low) + j - j_low) = swabbed[j];
+				if (swabbed[j] > max_elev)  {
+					max_elev = swabbed[j];
+				}
+				if (swabbed[j] < min_elev)  {
+					min_elev = swabbed[j];
+				}
+			}
+			/*
+			 * We still have the last column of the array unfilled.
+			 * If there is additional data available adjacent to the region
+			 * of interest, then fill this column from there.  Otherwise
+			 * we will fill it later by duplicating the second-to-last column.
+			 */
+			if (j_high < dem_corners->x)  {
+
+// Debugging code
+//if (((i - i_low) < 0) || ((i - i_low) > (i_high - i_low - 1)))  {
+//fprintf(stderr, "i index out: %d       i=%d  j=%d i_low=%d i_high=%d j_low=%d j_high=%d k=%d l=%d\n", i - i_low, i, j, i_low, i_high, j_low, j_high, k, l);
+//}
+//if (((i - i_low) == 0) || ((i - i_low) == (i_high - i_low - 1)))  {
+//fprintf(stderr, "i_index %d touches boundary\n", i - i_low);
+//}
+
+				*(dem_corners->ptr + j_size * (i - i_low) + j_high - j_low) = swabbed[j_high];
+				if (swabbed[j_high] > max_elev)  {
+					max_elev = swabbed[j_high];
+				}
+				if (swabbed[j_high] < min_elev)  {
+					min_elev = swabbed[j_high];
+				}
+			}
+		}
+		if ((i == (i_high - 1)) && (i_high >= dem_corners->y))  {
+			break;
+		}
+		if (i == i_high)  {
+			/*
+			 * We still have the last row of the array unfilled.
+			 * If there is additional data available adjacent to the region
+			 * of interest, then fill this row from there.  Otherwise
+			 * we will fill it later by duplicating the second-to-last row.
+			 */
+			for (j = j_low; j < j_high; j++)  {
+
+// Debugging code
+//if (((j - j_low) < 0) || ((j - j_low) > (j_high - j_low - 1)))  {
+//fprintf(stderr, "j index out: %d       i=%d  j=%d i_low=%d i_high=%d j_low=%d j_high=%d k=%d l=%d\n", j - j_low, i, j, i_low, i_high, j_low, j_high, k, l);
+//}
+//if (((j - j_low) == 0) || ((j - j_low) == (j_high - j_low - 1)))  {
+//fprintf(stderr, "j_index %d touches boundary\n", j - j_low);
+//}
+
+				*(dem_corners->ptr + j_size * (i_high - i_low) + j - j_low) = swabbed[j];
+				if (swabbed[j] > max_elev)  {
+					max_elev = swabbed[j];
+				}
+				if (swabbed[j] < min_elev)  {
+					min_elev = swabbed[j];
+				}
+			}
+			/*
+			 * Now check that last little corner sample in the last column and last row.
+			 */
+			if (j_high < dem_corners->x)  {
+				*(dem_corners->ptr + j_size * (i_high - i_low) + j_high - j_low) = swabbed[j_high];
+				if (swabbed[j_high] > max_elev)  {
+					max_elev = swabbed[j_high];
+				}
+				if (swabbed[j_high] < min_elev)  {
+					min_elev = swabbed[j_high];
+				}
+			}
+			break;
+		}
+	}
+	free(swabbed);
+	free(unswabbed);
+
+	/*
+	 * At this point the array is filled in, except for the rag-tag extra row and
+	 * extra column that fill it out to even latitude/longitude.
+	 * If there was data available above i_high and j_high,
+	 * then we have already filled these extra slots from there.
+	 * If not, then we now fill them up with data from the adjacent row/column.
+	 */
+	if (j_high >= dem_corners->x)  {
+		for (i = 0; i < (i_high - i_low + 1); i++)  {
+			*(dem_corners->ptr + j_size * i + j_high - j_low) = *(dem_corners->ptr + j_size * i + j_high - j_low - 1);
+		}
+	}
+	if (i_high >= dem_corners->y)  {
+		for (j = 0; j < (j_high - j_low + 1); j++)  {
+			*(dem_corners->ptr + j_size * (i_high - i_low) + j) = *(dem_corners->ptr + j_size * (i_high - i_low - 1) + j);
+		}
+	}
+
+
+	/*
+	 * We saved the maximum and minimum elevations for the
+	 * entire 1-degree block.  We need to put these into the dem_a structure.
+	 */
+	dem_a->max_elev = max_elev;
+	dem_a->min_elev = min_elev;
+
+	/*
+	 * If (info_flag != 0)
+	 * then we return unaltered information in dem_corners.
+	 * Otherwise, we need to adjust dem_corners to reflect
+	 * what we actually stored in dem_corners->ptr.
+	 */
+	if (info_flag == 0)  {
+		/*
+		 * Redfearn's formulas aren't happy when the latitude becomes exactly -90 or 90.
+		 * Twiddle them slightly for these special cases.
+		 */
+		if (lat_high == 90.0)  {
+			lat_tmp = 89.999;
+		}
+		else  {
+			lat_tmp = lat_high;
+		}
+		if (redfearn(dem_datum, &(dem_corners->nw_x_gp), &(dem_corners->nw_y_gp), &(dem_a->zone), lat_tmp, long_low, 0) != 0)  {
+			fprintf(stderr, "call to redfearn() fails.\n");
+			free(dem_corners->ptr);
+			return 1;
+		}
+		if (redfearn(dem_datum, &(dem_corners->ne_x_gp), &(dem_corners->ne_y_gp), &(dem_a->zone), lat_tmp, long_high, 0) != 0)  {
+			fprintf(stderr, "call to redfearn() fails.\n");
+			free(dem_corners->ptr);
+			return 1;
+		}
+		if (lat_low == -90.0)  {
+			lat_tmp = -89.999;
+		}
+		else  {
+			lat_tmp = lat_low;
+		}
+		if (redfearn(dem_datum, &(dem_corners->sw_x_gp), &(dem_corners->sw_y_gp), &(dem_a->zone), lat_tmp, long_low, 0) != 0)  {
+			fprintf(stderr, "call to redfearn() fails.\n");
+			free(dem_corners->ptr);
+			return 1;
+		}
+		if (redfearn(dem_datum, &(dem_corners->se_x_gp), &(dem_corners->se_y_gp), &(dem_a->zone), lat_tmp, long_high, 0) != 0)  {
+			fprintf(stderr, "call to redfearn() fails.\n");
+			free(dem_corners->ptr);
+			return 1;
+		}
+
+		dem_corners->sw_lat  = lat_low;
+		dem_corners->sw_long = long_low;
+		dem_corners->nw_lat  = lat_high;
+		dem_corners->nw_long = long_low;
+		dem_corners->ne_lat  = lat_high;
+		dem_corners->ne_long = long_high;
+		dem_corners->se_lat  = lat_low;
+		dem_corners->se_long = long_high;
+
+		dem_corners->x = j_size;
+		dem_corners->y = i_high - i_low + 1;
+	}
+
+
+	/*
+	 * Close all open files.
+	 */
+	if (gz_flag == 0)  {
+		buf_close(fdesc_in);
+	}
+	else  {
+		buf_close_z(fdesc_in);
+	}
+
+
+	return 0;
+}
+
+
+
+
+
+/*
+ * This routine parses relevant data from a GTOPO30 ".HDR" file
+ * and inserts the converted data into the given dem_a storage structure.
+ */
+int
+parse_gtopo30_hdr(char *file_name, struct dem_corners *dem_corners, struct dem_record_type_a *dem_a, struct datum *dem_datum, long *nbytes, long *nodata, long *gz_flag)
+{
+	long i, j, k, l;
+	long length;
+	int fdesc_in;
+	ssize_t (*read_function)();
+	char buf[DEM_RECORD_LENGTH + 1];	// Add 1 for the null terminator we insert when parsing a keyword/value pair.
+	ssize_t ret_val;
+	long nrows = -1;	// Typical value:   6000
+	long ncols = -1;	// Typical value:   4800
+	long nbands = -1;	// Typical value:      1
+	long nbits = -1;	// Typical value:      8
+	long bandrowbytes = -1;	// Typical value:   4800
+	long totalrowbytes = -1;// Typical value:   4800
+	long bandgapbytes = -1;	// Typical value:      0
+	double ulxmap = -181.0;	// Typical value:    -99.99583333333334
+	double ulymap = -91.0;	// Typical value:     39.99583333333333
+	double xdim = -100.0;	// Typical value:      0.00833333333333
+	double ydim = -100.0;	// Typical value:      0.00833333333333
+	double se_lat;		// latitude of southeast corner of data
+	double se_long;		// longitude of southeast corner of data
+	long key_index, key_end;
+	long value_index, value_end;
+	double lat_tmp, long_tmp;
+
+	*nbytes = -1;
+	*nodata = -9999;
+
+
+	/*
+	 * Open the header file.
+	 */
+	length = strlen(file_name);
+	if ((strcmp(&file_name[length - 3], ".gz") == 0) || (strcmp(&file_name[length - 3], ".GZ") == 0))  {
+		*gz_flag = 1;
+		if ((fdesc_in = buf_open_z(file_name, O_RDONLY)) < 0)  {
+			fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+			exit(0);
+		}
+		read_function = get_a_line_z;
+	}
+	else  {
+		*gz_flag = 0;
+		if ((fdesc_in = buf_open(file_name, O_RDONLY)) < 0)  {
+			fprintf(stderr, "Can't open %s for reading, errno = %d\n", file_name, errno);
+			exit(0);
+		}
+		read_function = get_a_line;
+	}
+
+
+
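+	/*
+	 * For reference, a GTOPO30 ".HDR" file is a small text file of
+	 * KEYWORD VALUE pairs, one pair per line.  An illustrative excerpt
+	 * (values are typical, not taken from any particular tile):
+	 *
+	 *     BYTEORDER      M
+	 *     LAYOUT         BIL
+	 *     NROWS          6000
+	 *     NCOLS          4800
+	 *     NODATA         -9999
+	 *     ULXMAP         -99.99583333333334
+	 *     ULYMAP         39.99583333333333
+	 *     XDIM           0.00833333333333
+	 *     YDIM           0.00833333333333
+	 */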
+	/*
+	 * Read and parse each line of the HDR file.
+	 */
+	while ((ret_val = read_function(fdesc_in, buf, DEM_RECORD_LENGTH)) > 0)  {
+		/* Strip off any leading white space.  There shouldn't be any, but... */
+		for (key_index = 0; key_index < ret_val; key_index++)  {
+			if ((buf[key_index] != ' ') && (buf[key_index] != '\t'))  {
+				break;
+			}
+		}
+		/* Find the end of the key. */
+		for (key_end = key_index; key_end < ret_val; key_end++)  {
+			if ((buf[key_end] == '\n') || (buf[key_end] == '\r') ||
+			    (buf[key_end] == ' ') || (buf[key_end] == '\t'))  {
+				break;
+			}
+		}
+		if ((key_end == ret_val) || (buf[key_end] == '\n') || (buf[key_end] == '\r'))  {
+			fprintf(stderr, "Warning:  Line \"%.*s\" does not contain a keyword/value pair.  Ignoring.\n", (int)ret_val, buf);
+			continue;
+		}
+		/* Find the beginning of the value. */
+		for (value_index = key_end; value_index < ret_val; value_index++)  {
+			if ((buf[value_index] != ' ') && (buf[value_index] != '\t') &&
+			    (buf[value_index] != '\n') && (buf[value_index] != '\r'))  {
+				break;
+			}
+		}
+		if (value_index == ret_val)  {
+			fprintf(stderr, "Warning:  Line \"%.*s\" does not contain a keyword/value pair.  Ignoring.\n", (int)ret_val, buf);
+			continue;
+		}
+		/* Find the end of the value. */
+		for (value_end = value_index; value_end < ret_val; value_end++)  {
+			if ((buf[value_end] == '\n') || (buf[value_end] == '\r') ||
+			    (buf[value_end] == ' ') || (buf[value_end] == '\t'))  {
+				break;
+			}
+		}
+
+		/* Null-terminate both the key and the value. */
+		buf[key_end] = '\0';
+		buf[value_end] = '\0';
+
+		/*
+		 * Search for the key in the list of known keys.
+		 *
+		 * Note:  The comments describing each keyword are
+		 * copied straight out of a gtopo30 README.TXT file.
+		 */
+		if (strcmp(&buf[key_index], "BYTEORDER") == 0)  {
+			/*
+			 * BYTEORDER      byte order in which image pixel values are stored
+			 * M = Motorola byte order (most significant byte first)
+			 */
+			if ((buf[value_index] != 'M') || (buf[value_index + 1] != '\0'))  {
+				fprintf(stderr, "Warning:  Unrecognized BYTEORDER (%s).  M is assumed.\n", &buf[value_index]);
+			}
+		}
+		else if (strcmp(&buf[key_index], "LAYOUT") == 0)  {
+			/*
+			 * LAYOUT         organization of the bands in the file
+			 * BIL = band interleaved by line (note: the source map is
+			 * a single band image)
+			 */
+			if (strcmp(&buf[value_index], "BIL") != 0)  {
+				fprintf(stderr, "Warning:  Unrecognized LAYOUT code (%s).  BIL is assumed.\n", &buf[value_index]);
+			}
+		}
+		else if (strcmp(&buf[key_index], "NROWS") == 0)  {
+			/*
+			 * NROWS          number of rows in the image
+			 */
+			nrows = strtol(&buf[value_index], (char **)0, 10);
+		}
+		else if (strcmp(&buf[key_index], "NCOLS") == 0)  {
+			/*
+			 * NCOLS          number of columns in the image
+			 */
+			ncols = strtol(&buf[value_index], (char **)0, 10);
+		}
+		else if (strcmp(&buf[key_index], "NBANDS") == 0)  {
+			/*
+			 * NBANDS         number of spectral bands in the image (1 for the source map)
+			 */
+			nbands = strtol(&buf[value_index], (char **)0, 10);
+			if (nbands != 1)  {
+				fprintf(stderr, "NBANDS value (%ld) is not 1.  Can't handle it.\n", nbands);
+				return 1;
+			}
+		}
+		else if (strcmp(&buf[key_index], "NBITS") == 0)  {
+			/*
+			 * NBITS          number of bits per pixel (8 for the source map)
+			 */
+			nbits = strtol(&buf[value_index], (char **)0, 10);
+			if (nbits & 0x7)  {
+				fprintf(stderr, "NBITS value (%ld) not divisible by 8.  Can't handle it.\n", nbits);
+				return 1;
+			}
+			*nbytes = nbits >> 3;
+		}
+		else if (strcmp(&buf[key_index], "BANDROWBYTES") == 0)  {
+			/*
+			 * BANDROWBYTES   number of bytes per band per row (the number of columns for
+			 * an 8-bit source map)
+			 */
+			bandrowbytes = strtol(&buf[value_index], (char **)0, 10);
+		}
+		else if (strcmp(&buf[key_index], "TOTALROWBYTES") == 0)  {
+			/*
+			 * TOTALROWBYTES  total number of bytes of data per row (the number of columns
+			 * for a single band 8-bit source map)
+			 */
+			totalrowbytes = strtol(&buf[value_index], (char **)0, 10);
+		}
+		else if (strcmp(&buf[key_index], "BANDGAPBYTES") == 0)  {
+			/*
+			 * BANDGAPBYTES   the number of bytes between bands in a BSQ format image
+			 * (0 for the source map)
+			 */
+			bandgapbytes = strtol(&buf[value_index], (char **)0, 10);
+			if (bandgapbytes != 0)  {
+				fprintf(stderr, "BANDGAPBYTES value (%ld) is not zero.  Can't handle it.\n", bandgapbytes);
+				return 1;
+			}
+		}
+		else if (strcmp(&buf[key_index], "NODATA") == 0)  {
+			/*
+			 * NODATA         value used for masking purposes
+			 */
+			*nodata = strtol(&buf[value_index], (char **)0, 10);
+			if (*nodata > 0)  {
+				fprintf(stderr, "Warning:  NODATA value (%ld) is greater than zero.  This may not be correct.\n", *nodata);
+			}
+		}
+		else if (strcmp(&buf[key_index], "ULXMAP") == 0)  {
+			/*
+			 * ULXMAP         longitude of the center of the upper-left pixel (decimal degrees)
+			 */
+			ulxmap = strtod(&buf[value_index], (char **)0);
+		}
+		else if (strcmp(&buf[key_index], "ULYMAP") == 0)  {
+			/*
+			 * ULYMAP         latitude  of the center of the upper-left pixel (decimal degrees)
+			 */
+			ulymap = strtod(&buf[value_index], (char **)0);
+		}
+		else if (strcmp(&buf[key_index], "XDIM") == 0)  {
+			/*
+			 * XDIM           x dimension of a pixel in geographic units (decimal degrees)
+			 */
+			xdim = strtod(&buf[value_index], (char **)0);
+		}
+		else if (strcmp(&buf[key_index], "YDIM") == 0)  {
+			/*
+			 * YDIM           y dimension of a pixel in geographic units (decimal degrees)
+			 */
+			ydim = strtod(&buf[value_index], (char **)0);
+		}
+		else  {
+			/*
+			 * During debugging we print out any unknown keywords.
+			 * For production use, we don't.
+			 */
+			// fprintf(stderr, "Warning:  Unknown keyword ignored:  \"%s\".\n", &buf[key_index]);
+		}
+	}
+	if (*gz_flag == 0)  {
+		buf_close(fdesc_in);
+	}
+	else  {
+		buf_close_z(fdesc_in);
+	}
+
+	/*
+	 * Do a few more sanity checks on the parameters.
+	 */
+	if (nrows <= 0)  {
+		fprintf(stderr, "NROWS value (%ld) doesn't make sense.\n", nrows);
+		return 1;
+	}
+	if (ncols <= 0)  {
+		fprintf(stderr, "NCOLS value (%ld) doesn't make sense.\n", ncols);
+		return 1;
+	}
+	if ((*nbytes != 1) && (*nbytes != 2))  {
+		fprintf(stderr, "NBITS value must be 8 or 16.  Can't deal with %ld.\n", nbits);
+		return 1;
+	}
+	if ((bandrowbytes >= 0) && (bandrowbytes != (*nbytes * ncols)))  {
+		fprintf(stderr, "BANDROWBYTES value (%ld) doesn't equal NBITS * NCOLS / 8.  Can't handle it.\n", bandrowbytes);
+		return 1;
+	}
+	if ((totalrowbytes >= 0) && (totalrowbytes != (*nbytes * ncols)))  {
+		fprintf(stderr, "TOTALROWBYTES value (%ld) doesn't equal NBITS * NCOLS / 8.  Can't handle it.\n", totalrowbytes);
+		return 1;
+	}
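+	/*
+	 * For example (illustrative numbers):  an 8-bit file with NCOLS 4800 must
+	 * report BANDROWBYTES and TOTALROWBYTES of 4800, while a 16-bit file with
+	 * the same NCOLS must report 9600 for both.
+	 */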
+
+	/*
+	 * ulxmap and ulymap are offset so that the first sample is xdim/2 and ydim/2 in
+	 * from the northwest corner.  Move them back to round-numbered values.
+	 */
+	ulxmap = ulxmap - xdim / 2.0;
+	ulymap = ulymap + ydim / 2.0;
+	if ((ulxmap < -180.001) || (ulxmap > 180.001))  {
+		fprintf(stderr, "ULXMAP value (%g) is not in the range [-180, 180].\n", ulxmap);
+		return 1;
+	}
+	if ((ulymap < -90.0001) || (ulymap > 90.0001))  {
+		fprintf(stderr, "ULYMAP value (%g) is not in the range [-90, 90].\n", ulymap);
+		return 1;
+	}
+
+	/*
+	 * Find the latitude/longitude of the southeast corner.
+	 * We will need these.
+	 */
+	se_lat = (double)round(ulymap - ydim * (double)nrows);
+	se_long = (double)round(ulxmap + xdim * (double)ncols);
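+	/*
+	 * Worked example (using the typical header values noted above, purely for
+	 * illustration):  with ULXMAP -99.99583333333334, ULYMAP 39.99583333333333,
+	 * XDIM = YDIM = 0.00833333333333, NROWS 6000, and NCOLS 4800, the half-cell
+	 * shift above moves the corner to (-100.0, 40.0), and the southeast corner
+	 * works out to se_lat = round(40 - 0.00833333333333 * 6000) = -10 and
+	 * se_long = round(-100 + 0.00833333333333 * 4800) = -60.
+	 */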
+
+
+	/*
+	 * All of the data from the header is processed.
+	 * Now fill the passed structures with data and return.
+	 */
+	i = strlen(file_name);
+	for (j = i - 1; j >= 0; j--)  {
+		if (file_name[j] == '/')  {
+			j++;
+			break;
+		}
+	}
+	if (j < 0)  {
+		j = 0;
+	}
+	if ((i - j) > 40)  {
+		strcpy(dem_a->title, "GTOPO30 data");
+	}
+	else  {
+		strcpy(dem_a->title, &file_name[j]);
+	}
+	dem_a->level_code = 0;
+	dem_a->plane_ref = 3;
+	dem_a->plane_units = 3;
+	dem_a->elev_units = 2;
+	dem_a->min_elev = 100000.0;
+	dem_a->max_elev = -100000.0;
+	dem_a->angle = 0.0;
+	dem_a->accuracy = 0;
+	dem_a->x_res = 30.0;
+	dem_a->y_res = 30.0;
+	dem_a->z_res = 1.0;
+	dem_a->cols = ncols;
+	dem_a->rows = nrows;
+	dem_a->horizontal_datum = 3;
+
+	/* The datum is WGS-84.  Initialize the parameters. */
+	dem_datum->a = WGS84_SEMIMAJOR;
+	dem_datum->b = WGS84_SEMIMINOR;
+	dem_datum->e_2 = WGS84_E_SQUARED;
+	dem_datum->f_inv = WGS84_F_INV;
+	dem_datum->k0 = UTM_K0;
+	dem_datum->a0 = WGS84_A0;
+	dem_datum->a2 = WGS84_A2;
+	dem_datum->a4 = WGS84_A4;
+	dem_datum->a6 = WGS84_A6;
+
+	/*
+	 * Redfearn's formulas aren't happy when the latitude becomes exactly -90 or 90.
+	 * Twiddle them slightly for these special cases.
+	 */
+	if (ulymap == 90.0)  {
+		lat_tmp = 89.999;
+	}
+	else if (ulymap == -90.0)  {
+		lat_tmp = -89.999;
+	}
+	else  {
+		lat_tmp = ulymap;
+	}
+	if (redfearn(dem_datum, &(dem_a->nw_x_gp), &(dem_a->nw_y_gp), &(dem_a->zone), lat_tmp, ulxmap, 0) != 0)  {
+		fprintf(stderr, "call to redfearn() fails.\n");
+		return 1;
+	}
+	if (redfearn(dem_datum, &(dem_a->ne_x_gp), &(dem_a->ne_y_gp), &(dem_a->zone), lat_tmp, se_long, 0) != 0)  {
+		fprintf(stderr, "call to redfearn() fails.\n");
+		return 1;
+	}
+	if (se_lat == 90.0)  {
+		lat_tmp = 89.999;
+	}
+	else if (se_lat == -90.0)  {
+		lat_tmp = -89.999;
+	}
+	else  {
+		lat_tmp = se_lat;
+	}
+	if (redfearn(dem_datum, &(dem_a->sw_x_gp), &(dem_a->sw_y_gp), &(dem_a->zone), lat_tmp, ulxmap, 0) != 0)  {
+		fprintf(stderr, "call to redfearn() fails.\n");
+		return 1;
+	}
+	if (redfearn(dem_datum, &(dem_a->se_x_gp), &(dem_a->se_y_gp), &(dem_a->zone), lat_tmp, se_long, 0) != 0)  {
+		fprintf(stderr, "call to redfearn() fails.\n");
+		return 1;
+	}
+
+	dem_corners->sw_x_gp = dem_a->sw_x_gp;
+	dem_corners->sw_y_gp = dem_a->sw_y_gp;
+	dem_corners->nw_x_gp = dem_a->nw_x_gp;
+	dem_corners->nw_y_gp = dem_a->nw_y_gp;
+	dem_corners->ne_x_gp = dem_a->ne_x_gp;
+	dem_corners->ne_y_gp = dem_a->ne_y_gp;
+	dem_corners->se_x_gp = dem_a->se_x_gp;
+	dem_corners->se_y_gp = dem_a->se_y_gp;
+
+	dem_corners->sw_lat  = se_lat;
+	dem_corners->sw_long = ulxmap;
+	dem_corners->nw_lat  = ulymap;
+	dem_corners->nw_long = ulxmap;
+	dem_corners->ne_lat  = ulymap;
+	dem_corners->ne_long = se_long;
+	dem_corners->se_lat  = se_lat;
+	dem_corners->se_long = se_long;
+
+	dem_corners->x = ncols;
+	dem_corners->y = nrows;
+
+
+	return 0;
+}

Added: packages/drawmap/branches/upstream/current/gunzip.c
===================================================================
--- packages/drawmap/branches/upstream/current/gunzip.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/gunzip.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,2103 @@
+/*
+ * gunzip - a library allowing programs to directly read gzipped files.
+ * Copyright (c) 1997,1998  Fred M. Erickson
+ *
+ * This software is a stripped-down modification of the standard
+ * gzip 1.2.4 distribution.  The original distribution contains
+ * the following copyright information:
+ *
+ * =========================================================================
+ * Copyright (C) 1992-1993 Jean-loup Gailly
+ * The unzip code was written and put in the public domain by Mark Adler.
+ * Portions of the lzw code are derived from the public domain 'compress'
+ * written by Spencer Thomas, Joe Orost, James Woods, Jim McKie, Steve Davies,
+ * Ken Turkowski, Dave Mack and Peter Jannesen.
+ *
+ * See the licensing information below, and the file COPYING, for the software license.
+ *
+ *  Copyright (C) 1992-1993 Jean-loup Gailly
+ *  This program is free software; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation; either version 2, or (at your option)
+ *  any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program; if not, write to the Free Software
+ *  Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ * The GNU General Public License applies equally to this modified version.
+ *
+ *
+ *
+ *
+ * This file contains incredible ugliness.  In August 1997,
+ * I extracted code from gzip version 1.2.4, and twisted
+ * it into a surreal new shape so that it can be used
+ * as a library to unzip files.
+ *
+ * To use it, one simply calls zread(), which takes the
+ * same arguments as read(), except that there is an extra argument
+ * to signal the start of reading a new file, and except that it
+ * returns uncompressed data from a gzip file.
+ *
+ * Note that a call to zread() MUST request at least as many bytes
+ * as WSIZE, or this crude gob of duct tape and baling wire will
+ * catastrophically fail.  Thus, it is usually best to use zread() by
+ * going through the routines in big_buf_io_z.c
+ *
+ * I took code from gzip.h, gzip.c, unzip.c, inflate.c, and util.c,
+ * (and maybe other files that I have forgotten).
+ * All files except for gzip.h have been pulled into this
+ * single file.
+ * The modifications consisted first of extracting only the
+ * gzip code that is relevant for decoding that subset of the
+ * various file types that is encountered in files actually
+ * produced by gzip.  In other words, this library won't decode
+ * things like the ".Z" files created by "compress" (basically because
+ * I didn't care about them, not because it was inherently difficult
+ * to handle them).
+ * Furthermore, I have made no effort to verify that it
+ * unzips every possible type of file that it ostensibly supports.
+ *
+ * Once I had the extracted code, I went in and added brute-force
+ * code to make the various inflation functions re-entrant.
+ * Not pretty.  Be afraid.  Be very afraid.
+ *
+ * I did this because I was writing an application with large data
+ * requirements, and didn't like the idea of doing 100 fork-exec
+ * operations to set up 100 pipes to funnel 100 files through
+ * 100 actual running copies of gzip.  I also did it to find out if
+ * it could be done.  It turned out to be surprisingly easy, as long
+ * as I didn't worry about maintainability or robustness or efficiency.
+ */
+
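+/*
+ * A minimal usage sketch (illustrative only; big_buf_io_z.c defines the real
+ * interface), mirroring the way the drawmap code reads gzipped files through
+ * the buffered wrappers instead of calling zread() directly:
+ *
+ *	int fdesc;
+ *	char line[1025];
+ *	ssize_t count;
+ *
+ *	if ((fdesc = buf_open_z("some_file.gz", O_RDONLY)) < 0)  {
+ *		exit(0);
+ *	}
+ *	while ((count = get_a_line_z(fdesc, line, 1024)) > 0)  {
+ *		... process one uncompressed line ...
+ *	}
+ *	buf_close_z(fdesc);
+ */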
+/* ======================= parts of gzip.c, with additional code ======== */
+#include "gzip.h"
+#include <setjmp.h>
+
+#define MAX_PATH_LEN 1024
+#define RECORD_IO 0
+int last_member;      /* set for .zip and .Z files */
+int force = 0;
+int list = 0;         /* list the file contents (-l) */
+long header_bytes;   /* number of bytes in gzip header */
+int verbose = 0;      /* be verbose (-v) */
+
+#ifndef MAKE_LEGAL_NAME
+#  ifdef NO_MULTIPLE_DOTS
+#    define MAKE_LEGAL_NAME(name)   make_simple_name(name)
+#  else
+#    define MAKE_LEGAL_NAME(name)
+#  endif
+#endif
+
+#define get_char() get_byte()
+
+int ifd, ofd;
+unsigned insize;           /* valid bytes in inbuf */
+unsigned inptr;            /* index of next byte to be processed in inbuf */
+unsigned outcnt;           /* bytes in output buffer */
+unsigned char inbuf[INBUFSIZ +INBUF_EXTRA];
+unsigned char outbuf[OUTBUFSIZ+OUTBUF_EXTRA];
+int to_stdout = 0;    /* output to stdout (-c) */
+int quiet = 0;        /* be very quiet (-q) */
+int test = 0;         /* test .gz file integrity */
+char *progname;       /* program name */
+int method = DEFLATED;/* compression method */
+int exit_code = OK;   /* program exit code */
+int (*work) OF((int infile, int outfile)) = unzip; /* function to call */
+int part_nb;          /* number of parts in .gz file */
+long time_stamp;      /* original time stamp (modification time) */
+long ifile_size;      /* input file size, -1 for devices (debug only) */
+
+long bytes_in;             /* number of input bytes */
+long bytes_out;            /* number of output bytes */
+char ifname[MAX_PATH_LEN]; /* input file name */
+char ofname[MAX_PATH_LEN]; /* output file name */
+unsigned char window[WSIZE];
+local int get_method(int);
+
+int z_control;
+int ofd = -1;
+
+jmp_buf	global_env;
+
+char	*global_buf;
+int	global_count;
+
+
+/* Test routine.  Unzips a file from stdin onto stdout. */
+/* main()
+/* {
+/*	int ret_val;
+/*	int new_flag = 1;
+/*	char my_buf[WSIZE];
+/*
+/*	while ((ret_val = zread(0, my_buf, WSIZE, new_flag)) > 0)  {
+/*		write(1, my_buf, ret_val);
+/*		new_flag = 0;
+/*	}
+/*}
+*/
+
+int
+zread(int fd, char *buf, int length, int new_flag)
+{
+
+	ifd = fd;
+
+	if (new_flag != 0)  {
+		z_control = 0;
+		strcpy(ifname, "<file name unavailable>");
+		strcpy(ofname, "<file name unavailable>");
+		time_stamp = 0; /* time unknown by default */
+		ifile_size = -1L; /* convention for unknown size */
+		clear_bufs(); /* clear input and output buffers */
+		to_stdout = 1;
+		part_nb = 1;	/* Assume one part.  If there are more, they will be ignored. */
+		method = get_method(ifd);
+		if (method < 0) {
+			exit(0);
+		}
+		if (work != unzip)  {
+			fprintf(stderr, "Gzipped file of un-handle-able type.\n");
+			exit(0);
+		}
+	}
+/*	if ((*work)(ifd, ofd) != OK) return; */
+
+/*	read(ifd, inbuf, INBUFSIZ); */
+
+	global_buf = buf;
+	global_count = length;
+
+	if (setjmp(global_env) == 0)  {
+		unzip(ifd, ofd);
+	}
+
+	return global_count;
+}
+
+
+RETSIGTYPE
+abort_gzip(void)
+{
+	exit(0);
+}
+
+
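+/*
+ * For orientation (a sketch, not a complete description of the format), a
+ * typical gzip member as written by gzip 1.2.4 is laid out as:
+ *
+ *	2 bytes   magic (GZIP_MAGIC, 0x1f 0x8b)
+ *	1 byte    compression method (8 = DEFLATED)
+ *	1 byte    flags (CONTINUATION, EXTRA_FIELD, ORIG_NAME, COMMENT, ...)
+ *	4 bytes   time stamp
+ *	1 byte    extra flags
+ *	1 byte    OS type
+ *	...       optional fields selected by the flags (part number, extra
+ *	          field, original file name, comment)
+ *	...       the deflated data itself
+ *	4 bytes   crc32 of the uncompressed data
+ *	4 bytes   uncompressed size modulo 2^32
+ *
+ * get_method() below consumes everything up to the start of the deflated data.
+ */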
+/* ========================================================================
+ * Check the magic number of the input file and update ofname if an
+ * original name was given and to_stdout is not set.
+ * Return the compression method, -1 for error, -2 for warning.
+ * Set inptr to the offset of the next byte to be processed.
+ * Updates time_stamp if there is one and --no-time is not used.
+ * This function may be called repeatedly for an input file consisting
+ * of several contiguous gzip'ed members.
+ * IN assertions: there is at least one remaining compressed member.
+ *   If the member is a zip file, it must be the only one.
+ */
+local int get_method(in)
+    int in;        /* input file descriptor */
+{
+    uch flags;     /* compression flags */
+    char magic[2]; /* magic header */
+    ulg stamp;     /* time stamp */
+
+    /* If --force and --stdout, zcat == cat, so do not complain about
+     * premature end of file: use try_byte instead of get_byte.
+     */
+    if (force && to_stdout) {
+	magic[0] = (char)try_byte();
+	magic[1] = (char)try_byte();
+	/* If try_byte returned EOF, magic[1] == 0xff */
+    } else {
+	magic[0] = (char)get_byte();
+	magic[1] = (char)get_byte();
+    }
+    method = -1;                 /* unknown yet */
+    part_nb++;                   /* number of parts in gzip file */
+    header_bytes = 0;
+    last_member = RECORD_IO;
+    /* assume multiple members in gzip file except for record oriented I/O */
+
+    if (memcmp(magic, GZIP_MAGIC, 2) == 0
+        || memcmp(magic, OLD_GZIP_MAGIC, 2) == 0) {
+
+	method = (int)get_byte();
+	if (method != DEFLATED) {
+	    fprintf(stderr,
+		    "%s: %s: unknown method %d -- get newer version of gzip\n",
+		    progname, ifname, method);
+	    exit_code = ERROR;
+	    return -1;
+	}
+	work = unzip;
+	flags  = (uch)get_byte();
+
+	if ((flags & ENCRYPTED) != 0) {
+	    fprintf(stderr,
+		    "%s: %s is encrypted -- get newer version of gzip\n",
+		    progname, ifname);
+	    exit_code = ERROR;
+	    return -1;
+	}
+	if ((flags & CONTINUATION) != 0) {
+	    fprintf(stderr,
+	   "%s: %s is a multi-part gzip file -- get newer version of gzip\n",
+		    progname, ifname);
+	    exit_code = ERROR;
+	    if (force <= 1) return -1;
+	}
+	if ((flags & RESERVED) != 0) {
+	    fprintf(stderr,
+		    "%s: %s has flags 0x%x -- get newer version of gzip\n",
+		    progname, ifname, flags);
+	    exit_code = ERROR;
+	    if (force <= 1) return -1;
+	}
+	stamp  = (ulg)get_byte();
+	stamp |= ((ulg)get_byte()) << 8;
+	stamp |= ((ulg)get_byte()) << 16;
+	stamp |= ((ulg)get_byte()) << 24;
+	if (stamp != 0 /* && !no_time */) time_stamp = stamp;
+
+	(void)get_byte();  /* Ignore extra flags for the moment */
+	(void)get_byte();  /* Ignore OS type for the moment */
+
+	if ((flags & CONTINUATION) != 0) {
+	    unsigned part = (unsigned)get_byte();
+	    part |= ((unsigned)get_byte())<<8;
+	    if (verbose) {
+		fprintf(stderr,"%s: %s: part number %u\n",
+			progname, ifname, part);
+	    }
+	}
+	if ((flags & EXTRA_FIELD) != 0) {
+	    unsigned len = (unsigned)get_byte();
+	    len |= ((unsigned)get_byte())<<8;
+	    if (verbose) {
+		fprintf(stderr,"%s: %s: extra field of %u bytes ignored\n",
+			progname, ifname, len);
+	    }
+	    while (len--) (void)get_byte();
+	}
+
+	/* Get original file name if it was truncated */
+	if ((flags & ORIG_NAME) != 0) {
+	    if (/* no_name || */ (to_stdout && !list) || part_nb > 1) {
+		/* Discard the old name */
+		char c; /* dummy used for NeXTstep 3.0 cc optimizer bug */
+		do {c=get_byte();} while (c != 0);
+	    } else {
+		/* Copy the base name. Keep a directory prefix intact. */
+                char *p = local_basename(ofname);
+                char *base = p;
+		for (;;) {
+		    *p = (char)get_char();
+		    if (*p++ == '\0') break;
+		    if (p >= ofname+sizeof(ofname)) {
+			error("corrupted input -- file name too large");
+		    }
+		}
+                /* If necessary, adapt the name to local OS conventions: */
+                if (!list) {
+                   MAKE_LEGAL_NAME(base);
+		   if (base) list=0; /* avoid warning about unused variable */
+                }
+	    } /* no_name || to_stdout */
+	} /* ORIG_NAME */
+
+	/* Discard file comment if any */
+	if ((flags & COMMENT) != 0) {
+	    while (get_char() != 0) /* null */ ;
+	}
+	if (part_nb == 1) {
+	    header_bytes = inptr + 2*sizeof(long); /* include crc and size */
+	}
+
+    } else if (memcmp(magic, PKZIP_MAGIC, 2) == 0 && inptr == 2
+	    && memcmp((char*)inbuf, PKZIP_MAGIC, 4) == 0) {
+	/* To simplify the code, we support a zip file when alone only.
+         * We are thus guaranteed that the entire local header fits in inbuf.
+         */
+        inptr = 0;
+	work = unzip;
+	if (check_zipfile(in) != OK) return -1;
+	/* check_zipfile may get ofname from the local header */
+	last_member = 1;
+
+    }
+/*    else if (memcmp(magic, PACK_MAGIC, 2) == 0) {
+/*	work = unpack;
+/*	method = PACKED;
+/*
+/*    }
+/*    else if (memcmp(magic, LZW_MAGIC, 2) == 0) {
+/*	work = unlzw;
+/*	method = COMPRESSED;
+/*	last_member = 1;
+/*
+/*    } else if (memcmp(magic, LZH_MAGIC, 2) == 0) {
+/*	work = unlzh;
+/*	method = LZHED;
+/*	last_member = 1;
+/*
+/*    }
+*/
+    else if (force && to_stdout && !list) { /* pass input unchanged */
+	method = STORED;
+	work = copy;
+        inptr = 0;
+	last_member = 1;
+    }
+    if (method >= 0) return method;
+
+    if (part_nb == 1) {
+	fprintf(stderr, "\n%s: %s: not in gzip format\n", progname, ifname);
+	exit_code = ERROR;
+	return -1;
+    } else {
+	WARN((stderr, "\n%s: %s: decompression OK, trailing garbage ignored\n",
+	      progname, ifname));
+	return -2;
+    }
+}
+
+/* =========================== unzip.c, with modifications ========== */
+/* unzip.c -- decompress files in gzip or pkzip format.
+ * Copyright (C) 1992-1993 Jean-loup Gailly
+ * This is free software; you can redistribute it and/or modify it under the
+ * terms of the GNU General Public License, see the file COPYING.
+ *
+ * The code in this file is derived from the file funzip.c written
+ * and put in the public domain by Mark Adler.
+ */
+
+/*
+   This version can extract files in gzip or pkzip format.
+   For the latter, only the first entry is extracted, and it has to be
+   either deflated or stored.
+ */
+
+#ifdef RCSID
+static char rcsid[] = "$Id: unzip.c,v 0.13 1993/06/10 13:29:00 jloup Exp $";
+#endif
+
+/* #include "tailor.h" */
+#include "gzip.h"
+/* #include "crypt.h" */
+#define RAND_HEAD_LEN  12  /* length of encryption random header */
+
+extern int z_control;
+extern int global_count;
+
+/* PKZIP header definitions */
+#define LOCSIG 0x04034b50L      /* four-byte lead-in (lsb first) */
+#define LOCFLG 6                /* offset of bit flag */
+#define  CRPFLG 1               /*  bit for encrypted entry */
+#define  EXTFLG 8               /*  bit for extended local header */
+#define LOCHOW 8                /* offset of compression method */
+#define LOCTIM 10               /* file mod time (for decryption) */
+#define LOCCRC 14               /* offset of crc */
+#define LOCSIZ 18               /* offset of compressed size */
+#define LOCLEN 22               /* offset of uncompressed length */
+#define LOCFIL 26               /* offset of file name field length */
+#define LOCEXT 28               /* offset of extra field length */
+#define LOCHDR 30               /* size of local header, including sig */
+#define EXTHDR 16               /* size of extended local header, inc sig */
+
+
+/* Globals */
+
+int decrypt;        /* flag to turn on decryption */
+char *key;          /* not used--needed to link crypt.c */
+int pkzip = 0;      /* set for a pkzip file */
+int ext_header = 0; /* set if extended local header */
+
+/* ===========================================================================
+ * Check zip file and advance inptr to the start of the compressed data.
+ * Get ofname from the local header if necessary.
+ */
+int check_zipfile(in)
+    int in;   /* input file descriptors */
+{
+    uch *h = inbuf + inptr; /* first local header */
+
+    ifd = in;
+
+    /* Check validity of local header, and skip name and extra fields */
+    inptr += LOCHDR + SH(h + LOCFIL) + SH(h + LOCEXT);
+
+    if (inptr > insize || LG(h) != LOCSIG) {
+	fprintf(stderr, "\n%s: %s: not a valid zip file\n",
+		progname, ifname);
+	exit_code = ERROR;
+	return ERROR;
+    }
+    method = h[LOCHOW];
+    if (method != STORED && method != DEFLATED) {
+	fprintf(stderr,
+		"\n%s: %s: first entry not deflated or stored -- use unzip\n",
+		progname, ifname);
+	exit_code = ERROR;
+	return ERROR;
+    }
+
+    /* If entry encrypted, decrypt and validate encryption header */
+    if ((decrypt = h[LOCFLG] & CRPFLG) != 0) {
+	fprintf(stderr, "\n%s: %s: encrypted file -- use unzip\n",
+		progname, ifname);
+	exit_code = ERROR;
+	return ERROR;
+    }
+
+    /* Save flags for unzip() */
+    ext_header = (h[LOCFLG] & EXTFLG) != 0;
+    pkzip = 1;
+
+    /* Get ofname and time stamp from local header (to be done) */
+    return OK;
+}
+
+/* ===========================================================================
+ * Unzip in to out.  This routine works on both gzip and pkzip files.
+ *
+ * IN assertions: the buffer inbuf contains already the beginning of
+ *   the compressed data, from offsets inptr to insize-1 included.
+ *   The magic header has already been checked. The output buffer is cleared.
+ */
+int unzip(in, out)
+    int in, out;   /* input and output file descriptors */
+{
+    static ulg orig_crc = 0;       /* original crc */
+    static ulg orig_len = 0;       /* original uncompressed length */
+    static int n;
+    static uch buf[EXTHDR];        /* extended local header */
+    static int res;
+    
+    if (z_control == 5)  {
+	goto LABEL5;
+    } else if (z_control == 6)  {
+    	goto LABEL6;
+    }
+    else if ((z_control & 0xf) != 0)  {
+    	goto LABEL7;
+    }
+    else  {
+	z_control = 0;
+    	orig_crc = 0;
+    	orig_len = 0;
+    }
+
+    updcrc(NULL, 0);           /* initialize crc */
+
+    if (pkzip && !ext_header) {  /* crc and length at the end otherwise */
+	orig_crc = LG(inbuf + LOCCRC);
+	orig_len = LG(inbuf + LOCLEN);
+    }
+
+    /* Decompress */
+    if (method == DEFLATED)  {
+
+LABEL7:
+	res = inflate();
+	global_count = 0;
+
+	if (res == 3) {
+	    error("out of memory");
+	} else if (res != 0) {
+	    error("invalid compressed data--format violated");
+	}
+
+    } else if (pkzip && method == STORED) {
+
+	register ulg n = LG(inbuf + LOCLEN);
+
+	if (n != LG(inbuf + LOCSIZ) - (decrypt ? RAND_HEAD_LEN : 0)) {
+
+	    fprintf(stderr, "len %ld, siz %ld\n", n, LG(inbuf + LOCSIZ));
+	    error("invalid compressed data--length mismatch");
+	}
+	while (n--) {
+	    uch c = (uch)get_byte();
+#ifdef CRYPT
+	    if (decrypt) zdecode(c);
+#endif
+/*	    put_ubyte(c); */
+	    window[outcnt++] = (uch)(c);
+	    if (outcnt == WSIZE)  {
+		z_control = 6;
+		flush_window();
+	    }
+LABEL6:
+	    z_control = 0;
+	}
+	z_control = 5;
+	flush_window();
+LABEL5:
+	z_control = 0;
+    } else {
+	error("internal error, invalid method");
+    }
+
+    /* Get the crc and original length */
+    if (!pkzip) {
+        /* crc32  (see algorithm.doc)
+	 * uncompressed input size modulo 2^32
+         */
+	for (n = 0; n < 8; n++) {
+	    buf[n] = (uch)get_byte(); /* may cause an error if EOF */
+	}
+	orig_crc = LG(buf);
+	orig_len = LG(buf+4);
+
+    } else if (ext_header) {  /* If extended header, check it */
+	/* signature - 4bytes: 0x50 0x4b 0x07 0x08
+	 * CRC-32 value
+         * compressed size 4-bytes
+         * uncompressed size 4-bytes
+	 */
+	for (n = 0; n < EXTHDR; n++) {
+	    buf[n] = (uch)get_byte(); /* may cause an error if EOF */
+	}
+	orig_crc = LG(buf+4);
+	orig_len = LG(buf+12);
+    }
+
+    /* Validate decompression */
+    if (orig_crc != updcrc(outbuf, 0)) {
+	error("invalid compressed data--crc error");
+    }
+    if (orig_len != (ulg)bytes_out) {
+	error("invalid compressed data--length error");
+    }
+
+    /* Check if there are more entries in a pkzip file */
+    if (pkzip && inptr + 4 < insize && LG(inbuf+inptr) == LOCSIG) {
+	if (to_stdout) {
+	    WARN((stderr,
+		  "%s: %s has more than one entry--rest ignored\n",
+		  progname, ifname));
+	} else {
+	    /* Don't destroy the input zip file */
+	    fprintf(stderr,
+		    "%s: %s has more than one entry -- unchanged\n",
+		    progname, ifname);
+	    exit_code = ERROR;
+	    ext_header = pkzip = 0;
+	    return ERROR;
+	}
+    }
+    ext_header = pkzip = 0; /* for next file */
+    return OK;
+}
+
+
+/* =========================== inflate.c, with modifications ========== */
+/* inflate.c -- Not copyrighted 1992 by Mark Adler
+   version c10p1, 10 January 1993 */
+
+/* You can do whatever you like with this source file, though I would
+   prefer that if you modify it and redistribute it that you include
+   comments to that effect with your name and the date.  Thank you.
+   [The history has been moved to the file ChangeLog.]
+ */
+
+/*
+   Inflate deflated (PKZIP's method 8 compressed) data.  The compression
+   method searches for as much of the current string of bytes (up to a
+   length of 258) in the previous 32K bytes.  If it doesn't find any
+   matches (of at least length 3), it codes the next byte.  Otherwise, it
+   codes the length of the matched string and its distance backwards from
+   the current position.  There is a single Huffman code that codes both
+   single bytes (called "literals") and match lengths.  A second Huffman
+   code codes the distance information, which follows a length code.  Each
+   length or distance code actually represents a base value and a number
+   of "extra" (sometimes zero) bits to get to add to the base value.  At
+   the end of each deflated block is a special end-of-block (EOB) literal/
+   length code.  The decoding process is basically: get a literal/length
+   code; if EOB then done; if a literal, emit the decoded byte; if a
+   length then get the distance and emit the referred-to bytes from the
+   sliding window of previously emitted data.
+
+   There are (currently) three kinds of inflate blocks: stored, fixed, and
+   dynamic.  The compressor deals with some chunk of data at a time, and
+   decides which method to use on a chunk-by-chunk basis.  A chunk might
+   typically be 32K or 64K.  If the chunk is uncompressible, then the
+   "stored" method is used.  In this case, the bytes are simply stored as
+   is, eight bits per byte, with none of the above coding.  The bytes are
+   preceded by a count, since there is no longer an EOB code.
+
+   If the data is compressible, then either the fixed or dynamic methods
+   are used.  In the dynamic method, the compressed data is preceded by
+   an encoding of the literal/length and distance Huffman codes that are
+   to be used to decode this block.  The representation is itself Huffman
+   coded, and so is preceded by a description of that code.  These code
+   descriptions take up a little space, and so for small blocks, there is
+   a predefined set of codes, called the fixed codes.  The fixed method is
+   used if the block codes up smaller that way (usually for quite small
+   chunks), otherwise the dynamic method is used.  In the latter case, the
+   codes are customized to the probabilities in the current block, and so
+   can code it much better than the pre-determined fixed codes.
+ 
+   The Huffman codes themselves are decoded using a multi-level table
+   lookup, in order to maximize the speed of decoding plus the speed of
+   building the decoding tables.  See the comments below that precede the
+   lbits and dbits tuning parameters.
+ */
+
+
+/*
+   Notes beyond the 1.93a appnote.txt:
+
+   1. Distance pointers never point before the beginning of the output
+      stream.
+   2. Distance pointers can point back across blocks, up to 32k away.
+   3. There is an implied maximum of 7 bits for the bit length table and
+      15 bits for the actual data.
+   4. If only one code exists, then it is encoded using one bit.  (Zero
+      would be more efficient, but perhaps a little confusing.)  If two
+      codes exist, they are coded using one bit each (0 and 1).
+   5. There is no way of sending zero distance codes--a dummy must be
+      sent if there are none.  (History: a pre 2.0 version of PKZIP would
+      store blocks with no distance codes, but this was discovered to be
+      too harsh a criterion.)  Valid only for 1.93a.  2.04c does allow
+      zero distance codes, which is sent as one code of zero bits in
+      length.
+   6. There are up to 286 literal/length codes.  Code 256 represents the
+      end-of-block.  Note however that the static length tree defines
+      288 codes just to fill out the Huffman codes.  Codes 286 and 287
+      cannot be used though, since there is no length base or extra bits
+      defined for them.  Similarly, there are up to 30 distance codes.
+      However, static trees define 32 codes (all 5 bits) to fill out the
+      Huffman codes, but the last two had better not show up in the data.
+   7. Unzip can check dynamic Huffman blocks for complete code sets.
+      The exception is that a single code would not be complete (see #4).
+   8. The five bits following the block type is really the number of
+      literal codes sent minus 257.
+   9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
+      (1+6+6).  Therefore, to output three times the length, you output
+      three codes (1+1+1), whereas to output four times the same length,
+      you only need two codes (1+3).  Hmm.
+  10. In the tree reconstruction algorithm, Code = Code + Increment
+      only if BitLength(i) is not zero.  (Pretty obvious.)
+  11. Correction: 4 Bits: # of Bit Length codes - 4     (4 - 19)
+  12. Note: length code 284 can represent 227-258, but length code 285
+      really is 258.  The last length deserves its own, short code
+      since it gets used a lot in very redundant files.  The length
+      258 is special since 258 - 3 (the min match length) is 255.
+  13. The literal/length and distance code bit lengths are read as a
+      single stream of lengths.  It is possible (and advantageous) for
+      a repeat code (16, 17, or 18) to go across the boundary between
+      the two sets of lengths.
+ */
+
+#ifdef RCSID
+static char rcsid[] = "$Id: inflate.c,v 0.14 1993/06/10 13:27:04 jloup Exp $";
+#endif
+
+#include <sys/types.h>
+
+/* #include "tailor.h" */
+
+#if defined(STDC_HEADERS) || !defined(NO_STDLIB_H)
+#  include <stdlib.h>
+#endif
+
+#include "gzip.h"
+#define slide window
+
+extern int z_control;
+
+/* Huffman code lookup table entry--this entry is four bytes for machines
+   that have 16-bit pointers (e.g. PC's in the small or medium model).
+   Valid extra bits are 0..13.  e == 15 is EOB (end of block), e == 16
+   means that v is a literal, 16 < e < 32 means that v is a pointer to
+   the next table, which codes e - 16 bits, and lastly e == 99 indicates
+   an unused code.  If a code with e == 99 is looked up, this implies an
+   error in the data. */
+struct huft {
+  uch e;                /* number of extra bits or operation */
+  uch b;                /* number of bits in this code or subcode */
+  union {
+    ush n;              /* literal, length base, or distance base */
+    struct huft *t;     /* pointer to next level of table */
+  } v;
+};
+
+
+/* Function prototypes */
+int huft_build OF((unsigned *, unsigned, unsigned, ush *, ush *,
+                   struct huft **, int *));
+int huft_free OF((struct huft *));
+int inflate_codes OF((struct huft *, struct huft *, int, int));
+int inflate_stored OF((void));
+int inflate_fixed OF((void));
+int inflate_dynamic OF((void));
+int inflate_block OF((int *));
+int inflate OF((void));
+
+
+/* The inflate algorithm uses a sliding 32K byte window on the uncompressed
+   stream to find repeated byte strings.  This is implemented here as a
+   circular buffer.  The index is updated simply by incrementing and then
+   and'ing with 0x7fff (32K-1). */
+/* It is left to other modules to supply the 32K area.  It is assumed
+   to be usable as if it were declared "uch slide[32768];" or as just
+   "uch *slide;" and then malloc'ed in the latter case.  The definition
+   must be in unzip.h, included above. */
+/* unsigned wp;             current position in slide */
+#define wp outcnt
+#define flush_output(w) (wp=(w),flush_window())
+
+/* Tables for deflate from PKZIP's appnote.txt. */
+static unsigned border[] = {    /* Order of the bit length code lengths */
+        16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
+static ush cplens[] = {         /* Copy lengths for literal codes 257..285 */
+        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
+        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
+        /* note: see note #13 above about the 258 in this list. */
+static ush cplext[] = {         /* Extra bits for literal codes 257..285 */
+        0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
+        3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 99, 99}; /* 99==invalid */
+static ush cpdist[] = {         /* Copy offsets for distance codes 0..29 */
+        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
+        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
+        8193, 12289, 16385, 24577};
+static ush cpdext[] = {         /* Extra bits for distance codes */
+        0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
+        7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
+        12, 12, 13, 13};
+
+
+
+/* Macros for inflate() bit peeking and grabbing.
+   The usage is:
+   
+        NEEDBITS(j)
+        x = b & mask_bits[j];
+        DUMPBITS(j)
+
+   where NEEDBITS makes sure that b has at least j bits in it, and
+   DUMPBITS removes the bits from b.  The macros use the variable k
+   for the number of bits in b.  Normally, b and k are register
+   variables for speed, and are initialized at the beginning of a
+   routine that uses these macros from a global bit buffer and count.
+
+   If we assume that EOB will be the longest code, then we will never
+   ask for bits with NEEDBITS that are beyond the end of the stream.
+   So, NEEDBITS should not read any more bytes than are needed to
+   meet the request.  Then no bytes need to be "returned" to the buffer
+   at the end of the last block.
+
+   However, this assumption is not true for fixed blocks--the EOB code
+   is 7 bits, but the other literal/length codes can be 8 or 9 bits.
+   (The EOB code is shorter than other codes because fixed blocks are
+   generally short.  So, while a block always has an EOB, many other
+   literal/length codes have a significantly lower probability of
+   showing up at all.)  However, by making the first table have a
+   lookup of seven bits, the EOB code will be found in that first
+   lookup, and so will not require that too many bits be pulled from
+   the stream.
+ */
+
+ulg bb;                         /* bit buffer */
+unsigned bk;                    /* bits in bit buffer */
+
+ush mask_bits[] = {
+    0x0000,
+    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
+    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
+};
+
+#ifdef CRYPT
+  uch cc;
+#  define NEXTBYTE() \
+     (decrypt ? (cc = get_byte(), zdecode(cc), cc) : get_byte())
+#else
+#  define NEXTBYTE()  (uch)get_byte()
+#endif
+#define NEEDBITS(n) {while(k<(n)){b|=((ulg)NEXTBYTE())<<k;k+=8;}}
+#define DUMPBITS(n) {b>>=(n);k-=(n);}
+
+
+/*
+   Huffman code decoding is performed using a multi-level table lookup.
+   The fastest way to decode is to simply build a lookup table whose
+   size is determined by the longest code.  However, the time it takes
+   to build this table can also be a factor if the data being decoded
+   is not very long.  The most common codes are necessarily the
+   shortest codes, so those codes dominate the decoding time, and hence
+   the speed.  The idea is you can have a shorter table that decodes the
+   shorter, more probable codes, and then point to subsidiary tables for
+   the longer codes.  The time it costs to decode the longer codes is
+   then traded against the time it takes to make longer tables.
+
+   The results of this trade are in the variables lbits and dbits
+   below.  lbits is the number of bits the first level table for literal/
+   length codes can decode in one step, and dbits is the same thing for
+   the distance codes.  Subsequent tables are also less than or equal to
+   those sizes.  These values may be adjusted either when all of the
+   codes are shorter than that, in which case the longest code length in
+   bits is used, or when the shortest code is *longer* than the requested
+   table size, in which case the length of the shortest code in bits is
+   used.
+
+   There are two different values for the two tables, since they code a
+   different number of possibilities each.  The literal/length table
+   codes 286 possible values, or in a flat code, a little over eight
+   bits.  The distance table codes 30 possible values, or a little less
+   than five bits, flat.  The optimum values for speed end up being
+   about one bit more than those, so lbits is 8+1 and dbits is 5+1.
+   The optimum values may differ though from machine to machine, and
+   possibly even between compilers.  Your mileage may vary.
+ */
+
+
+int lbits = 9;          /* bits in base literal/length lookup table */
+int dbits = 6;          /* bits in base distance lookup table */
+
+
+/* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
+#define BMAX 16         /* maximum bit length of any code (16 for explode) */
+#define N_MAX 288       /* maximum number of codes in any set */
+
+
+unsigned hufts;         /* track memory usage */
+
+
+int huft_build(b, n, s, d, e, t, m)
+unsigned *b;            /* code lengths in bits (all assumed <= BMAX) */
+unsigned n;             /* number of codes (assumed <= N_MAX) */
+unsigned s;             /* number of simple-valued codes (0..s-1) */
+ush *d;                 /* list of base values for non-simple codes */
+ush *e;                 /* list of extra bits for non-simple codes */
+struct huft **t;        /* result: starting table */
+int *m;                 /* maximum lookup bits, returns actual */
+/* Given a list of code lengths and a maximum table size, make a set of
+   tables to decode that set of codes.  Return zero on success, one if
+   the given code set is incomplete (the tables are still built in this
+   case), two if the input is invalid (all zero length codes or an
+   oversubscribed set of lengths), and three if not enough memory. */
+{
+  static unsigned a;                   /* counter for codes of length k */
+  static unsigned c[BMAX+1];           /* bit length count table */
+  static unsigned f;                   /* i repeats in table every f entries */
+  static int g;                        /* maximum code length */
+  static int h;                        /* table level */
+  static unsigned i;          /* counter, current code */
+  static unsigned j;          /* counter */
+  static int k;               /* number of bits in current code */
+  static int l;                        /* bits per table (returned in m) */
+  static unsigned *p;         /* pointer into c[], b[], or v[] */
+  static struct huft *q;      /* points to current table */
+  static struct huft r;                /* table entry for structure assignment */
+  static struct huft *u[BMAX];         /* table stack */
+  static unsigned v[N_MAX];            /* values in order of bit length */
+  static int w;               /* bits before this table == (l * h) */
+  static unsigned x[BMAX+1];           /* bit offsets, then code stack */
+  static unsigned *xp;                 /* pointer into x */
+  static int y;                        /* number of dummy codes added */
+  static unsigned z;                   /* number of entries in current table */
+
+
+  /* Generate counts for each bit length */
+  memzero(c, sizeof(c));
+  p = b;  i = n;
+  do {
+    Tracecv(*p, (stderr, (n-i >= ' ' && n-i <= '~' ? "%c %d\n" : "0x%x %d\n"), 
+	    n-i, *p));
+    c[*p]++;                    /* assume all entries <= BMAX */
+    p++;                      /* Can't combine with above line (Solaris bug) */
+  } while (--i);
+  if (c[0] == n)                /* null input--all zero length codes */
+  {
+    *t = (struct huft *)NULL;
+    *m = 0;
+    return 0;
+  }
+
+
+  /* Find minimum and maximum length, bound *m by those */
+  l = *m;
+  for (j = 1; j <= BMAX; j++)
+    if (c[j])
+      break;
+  k = j;                        /* minimum code length */
+  if ((unsigned)l < j)
+    l = j;
+  for (i = BMAX; i; i--)
+    if (c[i])
+      break;
+  g = i;                        /* maximum code length */
+  if ((unsigned)l > i)
+    l = i;
+  *m = l;
+
+
+  /* Adjust last length count to fill out codes, if needed */
+  for (y = 1 << j; j < i; j++, y <<= 1)
+    if ((y -= c[j]) < 0)
+      return 2;                 /* bad input: more codes than bits */
+  if ((y -= c[i]) < 0)
+    return 2;
+  c[i] += y;
+
+
+  /* Generate starting offsets into the value table for each length */
+  x[1] = j = 0;
+  p = c + 1;  xp = x + 2;
+  while (--i) {                 /* note that i == g from above */
+    *xp++ = (j += *p++);
+  }
+
+
+  /* Make a table of values in order of bit lengths */
+  p = b;  i = 0;
+  do {
+    if ((j = *p++) != 0)
+      v[x[j]++] = i;
+  } while (++i < n);
+
+
+  /* Generate the Huffman codes and for each, make the table entries */
+  x[0] = i = 0;                 /* first Huffman code is zero */
+  p = v;                        /* grab values in bit order */
+  h = -1;                       /* no tables yet--level -1 */
+  w = -l;                       /* bits decoded == (l * h) */
+  u[0] = (struct huft *)NULL;   /* just to keep compilers happy */
+  q = (struct huft *)NULL;      /* ditto */
+  z = 0;                        /* ditto */
+
+  /* go through the bit lengths (k already is bits in shortest code) */
+  for (; k <= g; k++)
+  {
+    a = c[k];
+    while (a--)
+    {
+      /* here i is the Huffman code of length k bits for value *p */
+      /* make tables up to required level */
+      while (k > w + l)
+      {
+        h++;
+        w += l;                 /* previous table always l bits */
+
+        /* compute minimum size table less than or equal to l bits */
+        z = (z = g - w) > (unsigned)l ? l : z;  /* upper limit on table size */
+        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
+        {                       /* too few codes for k-w bit table */
+          f -= a + 1;           /* deduct codes from patterns left */
+          xp = c + k;
+          while (++j < z)       /* try smaller tables up to z bits */
+          {
+            if ((f <<= 1) <= *++xp)
+              break;            /* enough codes to use up j bits */
+            f -= *xp;           /* else deduct codes from patterns */
+          }
+        }
+        z = 1 << j;             /* table entries for j-bit table */
+
+        /* allocate and link in new table */
+        if ((q = (struct huft *)malloc((z + 1)*sizeof(struct huft))) ==
+            (struct huft *)NULL)
+        {
+          if (h)
+            huft_free(u[0]);
+          return 3;             /* not enough memory */
+        }
+        hufts += z + 1;         /* track memory usage */
+        *t = q + 1;             /* link to list for huft_free() */
+        *(t = &(q->v.t)) = (struct huft *)NULL;
+        u[h] = ++q;             /* table starts after link */
+
+        /* connect to last table, if there is one */
+        if (h)
+        {
+          x[h] = i;             /* save pattern for backing up */
+          r.b = (uch)l;         /* bits to dump before this table */
+          r.e = (uch)(16 + j);  /* bits in this table */
+          r.v.t = q;            /* pointer to this table */
+          j = i >> (w - l);     /* (get around Turbo C bug) */
+          u[h-1][j] = r;        /* connect to last table */
+        }
+      }
+
+      /* set up table entry in r */
+      r.b = (uch)(k - w);
+      if (p >= v + n)
+        r.e = 99;               /* out of values--invalid code */
+      else if (*p < s)
+      {
+        r.e = (uch)(*p < 256 ? 16 : 15);    /* 256 is end-of-block code */
+        r.v.n = (ush)(*p);             /* simple code is just the value */
+	p++;                           /* one compiler does not like *p++ */
+      }
+      else
+      {
+        r.e = (uch)e[*p - s];   /* non-simple--look up in lists */
+        r.v.n = d[*p++ - s];
+      }
+
+      /* fill code-like entries with r */
+      f = 1 << (k - w);
+      for (j = i >> w; j < z; j += f)
+        q[j] = r;
+
+      /* backwards increment the k-bit code i */
+      for (j = 1 << (k - 1); i & j; j >>= 1)
+        i ^= j;
+      i ^= j;
+
+      /* backup over finished tables */
+      while ((i & ((1 << w) - 1)) != x[h])
+      {
+        h--;                    /* don't need to update q */
+        w -= l;
+      }
+    }
+  }
+
+
+  /* Return true (1) if we were given an incomplete table */
+  return y != 0 && g != 1;
+}
+
+
+
+int huft_free(t)
+struct huft *t;         /* table to free */
+/* Free the malloc'ed tables built by huft_build(), which makes a linked
+   list of the tables it made, with the links in a dummy first entry of
+   each table. */
+{
+  static struct huft *p, *q;
+
+
+  /* Go through linked list, freeing from the malloced (t[-1]) address. */
+  p = t;
+  while (p != (struct huft *)NULL)
+  {
+    q = (--p)->v.t;
+    free((char*)p);
+    p = q;
+  } 
+  return 0;
+}
+
+
+int inflate_codes(tl, td, bl, bd)
+struct huft *tl, *td;   /* literal/length and distance decoder tables */
+int bl, bd;             /* number of bits decoded by tl[] and td[] */
+/* inflate (decompress) the codes in a deflated (compressed) block.
+   Return an error code or zero if it all goes ok. */
+{
+  static unsigned e;  /* table entry flag/number of extra bits */
+  static unsigned n, d;        /* length and index for copy */
+  static unsigned w;           /* current window position */
+  static struct huft *t;       /* pointer to table entry */
+  static unsigned ml, md;      /* masks for bl and bd bits */
+  static ulg b;       /* bit buffer */
+  static unsigned k;  /* number of bits in bit buffer */
+
+  if ((z_control & 0xf) == 1)  {
+  	goto LABEL1;
+  }
+  if ((z_control & 0xf) == 2)  {
+  	goto LABEL2;
+  }
+
+  /* make local copies of globals */
+  b = bb;                       /* initialize bit buffer */
+  k = bk;
+  w = wp;                       /* initialize window position */
+
+  /* inflate the coded data */
+  ml = mask_bits[bl];           /* precompute masks for speed */
+  md = mask_bits[bd];
+  for (;;)                      /* do until end of block */
+  {
+    NEEDBITS((unsigned)bl)
+    if ((e = (t = tl + ((unsigned)b & ml))->e) > 16)
+      do {
+        if (e == 99)
+          return 1;
+        DUMPBITS(t->b)
+        e -= 16;
+        NEEDBITS(e)
+      } while ((e = (t = t->v.t + ((unsigned)b & mask_bits[e]))->e) > 16);
+    DUMPBITS(t->b)
+    if (e == 16)                /* then it's a literal */
+    {
+      slide[w++] = (uch)t->v.n;
+      Tracevv((stderr, "%c", slide[w-1]));
+      if (w == WSIZE)
+      {
+	z_control |= 1;
+        flush_output(w);
+LABEL1:
+	z_control &= ~1;
+        w = 0;
+      }
+    }
+    else                        /* it's an EOB or a length */
+    {
+      /* exit if end of block */
+      if (e == 15)
+        break;
+
+      /* get length of block to copy */
+      NEEDBITS(e)
+      n = t->v.n + ((unsigned)b & mask_bits[e]);
+      DUMPBITS(e);
+
+      /* decode distance of block to copy */
+      NEEDBITS((unsigned)bd)
+      if ((e = (t = td + ((unsigned)b & md))->e) > 16)
+        do {
+          if (e == 99)
+            return 1;
+          DUMPBITS(t->b)
+          e -= 16;
+          NEEDBITS(e)
+        } while ((e = (t = t->v.t + ((unsigned)b & mask_bits[e]))->e) > 16);
+      DUMPBITS(t->b)
+      NEEDBITS(e)
+      d = w - t->v.n - ((unsigned)b & mask_bits[e]);
+      DUMPBITS(e)
+      Tracevv((stderr,"\\[%d,%d]", w-d, n));
+
+      /* do the copy */
+      do {
+        n -= (e = (e = WSIZE - ((d &= WSIZE-1) > w ? d : w)) > n ? n : e);
+#if !defined(NOMEMCPY) && !defined(DEBUG)
+        if (w - d >= e)         /* (this test assumes unsigned comparison) */
+        {
+          memcpy(slide + w, slide + d, e);
+          w += e;
+          d += e;
+        }
+        else                      /* do it slow to avoid memcpy() overlap */
+#endif /* !NOMEMCPY */
+          do {
+            slide[w++] = slide[d++];
+	    Tracevv((stderr, "%c", slide[w-1]));
+          } while (--e);
+        if (w == WSIZE)
+        {
+	  z_control |= 2;
+          flush_output(w);
+LABEL2:
+          z_control &= ~2;
+          w = 0;
+        }
+      } while (n);
+    }
+  }
+
+
+  /* restore the globals from the locals */
+  wp = w;                       /* restore global window pointer */
+  bb = b;                       /* restore global bit buffer */
+  bk = k;
+
+  /* done */
+  return 0;
+}
+
+
+
+int inflate_stored()
+/* "decompress" an inflated type 0 (stored) block. */
+{
+  static unsigned n;           /* number of bytes in block */
+  static unsigned w;           /* current window position */
+  static ulg b;       /* bit buffer */
+  static unsigned k;  /* number of bits in bit buffer */
+
+  if (z_control == 3)  {
+  	goto LABEL3;
+  }
+
+
+  /* make local copies of globals */
+  b = bb;                       /* initialize bit buffer */
+  k = bk;
+  w = wp;                       /* initialize window position */
+
+
+  /* go to byte boundary */
+  n = k & 7;
+  DUMPBITS(n);
+
+
+  /* get the length and its complement */
+  NEEDBITS(16)
+  n = ((unsigned)b & 0xffff);
+  DUMPBITS(16)
+  NEEDBITS(16)
+  if (n != (unsigned)((~b) & 0xffff))
+    return 1;                   /* error in compressed data */
+  DUMPBITS(16)
+
+
+  /* read and output the compressed data */
+  while (n--)
+  {
+    NEEDBITS(8)
+    slide[w++] = (uch)b;
+    if (w == WSIZE)
+    {
+      z_control = 3;
+      flush_output(w);
+LABEL3:
+      z_control = 0;
+      w = 0;
+    }
+    DUMPBITS(8)
+  }
+
+
+  /* restore the globals from the locals */
+  wp = w;                       /* restore global window pointer */
+  bb = b;                       /* restore global bit buffer */
+  bk = k;
+  return 0;
+}
+
+
+
+int inflate_fixed()
+/* decompress an inflated type 1 (fixed Huffman codes) block.  We should
+   either replace this with a custom decoder, or at least precompute the
+   Huffman tables. */
+{
+  static int i;                /* temporary variable */
+  static struct huft *tl;      /* literal/length code table */
+  static struct huft *td;      /* distance code table */
+  static int bl;               /* lookup bits for tl */
+  static int bd;               /* lookup bits for td */
+  static unsigned l[288];      /* length list for huft_build */
+
+  if (z_control != 0)  {
+  	goto LABEL0;
+  }
+
+
+  /* set up literal table */
+  for (i = 0; i < 144; i++)
+    l[i] = 8;
+  for (; i < 256; i++)
+    l[i] = 9;
+  for (; i < 280; i++)
+    l[i] = 7;
+  for (; i < 288; i++)          /* make a complete, but wrong code set */
+    l[i] = 8;
+  bl = 7;
+  if ((i = huft_build(l, 288, 257, cplens, cplext, &tl, &bl)) != 0)
+    return i;
+
+
+  /* set up distance table */
+  for (i = 0; i < 30; i++)      /* make an incomplete code set */
+    l[i] = 5;
+  bd = 5;
+  if ((i = huft_build(l, 30, 0, cpdist, cpdext, &td, &bd)) > 1)
+  {
+    huft_free(tl);
+    return i;
+  }
+
+
+  /* decompress until an end-of-block code */
+  z_control = 0x80;
+LABEL0:
+  if (inflate_codes(tl, td, bl, bd))
+    return 1;
+  z_control = 0;
+
+
+  /* free the decoding tables, return */
+  huft_free(tl);
+  huft_free(td);
+  return 0;
+}
+
+
+
+int inflate_dynamic()
+/* decompress an inflated type 2 (dynamic Huffman codes) block. */
+{
+  static int i;                /* temporary variables */
+  static unsigned j;
+  static unsigned l;           /* last length */
+  static unsigned m;           /* mask for bit lengths table */
+  static unsigned n;           /* number of lengths to get */
+  static struct huft *tl;      /* literal/length code table */
+  static struct huft *td;      /* distance code table */
+  static int bl;               /* lookup bits for tl */
+  static int bd;               /* lookup bits for td */
+  static unsigned nb;          /* number of bit length codes */
+  static unsigned nl;          /* number of literal/length codes */
+  static unsigned nd;          /* number of distance codes */
+#ifdef PKZIP_BUG_WORKAROUND
+  static unsigned ll[288+32];  /* literal/length and distance code lengths */
+#else
+  static unsigned ll[286+30];  /* literal/length and distance code lengths */
+#endif
+  static ulg b;       /* bit buffer */
+  static unsigned k;  /* number of bits in bit buffer */
+
+  if (z_control != 0)  {
+  	goto LABEL12;
+  }
+
+
+  /* make local bit buffer */
+  b = bb;
+  k = bk;
+
+
+  /* read in table lengths */
+  NEEDBITS(5)
+  nl = 257 + ((unsigned)b & 0x1f);      /* number of literal/length codes */
+  DUMPBITS(5)
+  NEEDBITS(5)
+  nd = 1 + ((unsigned)b & 0x1f);        /* number of distance codes */
+  DUMPBITS(5)
+  NEEDBITS(4)
+  nb = 4 + ((unsigned)b & 0xf);         /* number of bit length codes */
+  DUMPBITS(4)
+#ifdef PKZIP_BUG_WORKAROUND
+  if (nl > 288 || nd > 32)
+#else
+  if (nl > 286 || nd > 30)
+#endif
+    return 1;                   /* bad lengths */
+
+
+  /* read in bit-length-code lengths */
+  for (j = 0; j < nb; j++)
+  {
+    NEEDBITS(3)
+    ll[border[j]] = (unsigned)b & 7;
+    DUMPBITS(3)
+  }
+  for (; j < 19; j++)
+    ll[border[j]] = 0;
+
+
+  /* build decoding table for trees--single level, 7 bit lookup */
+  bl = 7;
+  if ((i = huft_build(ll, 19, 19, NULL, NULL, &tl, &bl)) != 0)
+  {
+    if (i == 1)
+      huft_free(tl);
+    return i;                   /* incomplete code set */
+  }
+
+
+  /* read in literal and distance code lengths */
+  n = nl + nd;
+  m = mask_bits[bl];
+  i = l = 0;
+  while ((unsigned)i < n)
+  {
+    NEEDBITS((unsigned)bl)
+    j = (td = tl + ((unsigned)b & m))->b;
+    DUMPBITS(j)
+    j = td->v.n;
+    if (j < 16)                 /* length of code in bits (0..15) */
+      ll[i++] = l = j;          /* save last length in l */
+    else if (j == 16)           /* repeat last length 3 to 6 times */
+    {
+      NEEDBITS(2)
+      j = 3 + ((unsigned)b & 3);
+      DUMPBITS(2)
+      if ((unsigned)i + j > n)
+        return 1;
+      while (j--)
+        ll[i++] = l;
+    }
+    else if (j == 17)           /* 3 to 10 zero length codes */
+    {
+      NEEDBITS(3)
+      j = 3 + ((unsigned)b & 7);
+      DUMPBITS(3)
+      if ((unsigned)i + j > n)
+        return 1;
+      while (j--)
+        ll[i++] = 0;
+      l = 0;
+    }
+    else                        /* j == 18: 11 to 138 zero length codes */
+    {
+      NEEDBITS(7)
+      j = 11 + ((unsigned)b & 0x7f);
+      DUMPBITS(7)
+      if ((unsigned)i + j > n)
+        return 1;
+      while (j--)
+        ll[i++] = 0;
+      l = 0;
+    }
+  }
+
+
+  /* free decoding table for trees */
+  huft_free(tl);
+
+
+  /* restore the global bit buffer */
+  bb = b;
+  bk = k;
+
+
+  /* build the decoding tables for literal/length and distance codes */
+  bl = lbits;
+  if ((i = huft_build(ll, nl, 257, cplens, cplext, &tl, &bl)) != 0)
+  {
+    if (i == 1) {
+      fprintf(stderr, " incomplete literal tree\n");
+      huft_free(tl);
+    }
+    return i;                   /* incomplete code set */
+  }
+  bd = dbits;
+  if ((i = huft_build(ll + nl, nd, 0, cpdist, cpdext, &td, &bd)) != 0)
+  {
+    if (i == 1) {
+      fprintf(stderr, " incomplete distance tree\n");
+#ifdef PKZIP_BUG_WORKAROUND
+      i = 0;
+    }
+#else
+      huft_free(td);
+    }
+    huft_free(tl);
+    return i;                   /* incomplete code set */
+#endif
+  }
+
+
+  /* decompress until an end-of-block code */
+  z_control = 0x40;
+LABEL12:
+  if (inflate_codes(tl, td, bl, bd))
+    return 1;
+  z_control = 0;
+
+
+  /* free the decoding tables, return */
+  huft_free(tl);
+  huft_free(td);
+  return 0;
+}
+
+
+
+int inflate_block(e)
+int *e;                 /* last block flag */
+/* decompress an inflated block */
+{
+  static unsigned t;           /* block type */
+  static ulg b;       /* bit buffer */
+  static unsigned k;  /* number of bits in bit buffer */
+
+  if ((z_control == 0x41) || (z_control == 0x42))  {
+  	goto LABEL9;
+  }
+  if ((z_control == 0x81) || (z_control == 0x82))  {
+  	goto LABEL10;
+  }
+  if (z_control == 3)  {
+  	goto LABEL11;
+  }
+
+  /* make local bit buffer */
+  b = bb;
+  k = bk;
+
+
+  /* read in last block bit */
+  NEEDBITS(1)
+  *e = (int)b & 1;
+  DUMPBITS(1)
+
+
+  /* read in block type */
+  NEEDBITS(2)
+  t = (unsigned)b & 3;
+  DUMPBITS(2)
+
+
+  /* restore the global bit buffer */
+  bb = b;
+  bk = k;
+
+
+  /* inflate that block type */
+  if (t == 2)  {
+LABEL9:
+    return inflate_dynamic();
+  }
+  if (t == 0)  {
+LABEL11:
+    return inflate_stored();
+  }
+  if (t == 1)  {
+LABEL10:
+    return inflate_fixed();
+  }
+
+
+  /* bad block type */
+  return 2;
+}
+
+
+
+int inflate()
+/* decompress an inflated entry */
+{
+  static int e;                /* last block flag */
+  static int r;                /* result code */
+  static unsigned h;           /* maximum struct huft's malloc'ed */
+
+  if (z_control == 4)  {
+  	goto LABEL4;
+  }
+  else if (z_control != 0)  {
+  	goto LABEL8;
+  }
+
+
+  /* initialize window, bit buffer */
+  wp = 0;
+  bk = 0;
+  bb = 0;
+
+
+  /* decompress until the last block */
+  h = 0;
+  do {
+    hufts = 0;
+LABEL8:
+    if ((r = inflate_block(&e)) != 0)
+      return r;
+    if (hufts > h)
+      h = hufts;
+  } while (!e);
+
+  /* Undo too much lookahead. The next read will be byte aligned so we
+   * can discard unused bits in the last meaningful byte.
+   */
+  while (bk >= 8) {
+    bk -= 8;
+    inptr--;
+  }
+
+  /* flush out slide */
+  z_control = 4;
+  flush_output(wp);
+LABEL4:
+  z_control = 0;
+
+
+  /* return success */
+#ifdef DEBUG
+  fprintf(stderr, "<%u> ", h);
+#endif /* DEBUG */
+  return 0;
+}
+
+
+
+/* =========================== util.c, with modifications ========== */
+/* util.c -- utility functions for gzip support
+ * Copyright (C) 1992-1993 Jean-loup Gailly
+ * This is free software; you can redistribute it and/or modify it under the
+ * terms of the GNU General Public License, see the file COPYING.
+ */
+
+#ifdef RCSID
+static char rcsid[] = "$Id: util.c,v 0.15 1993/06/15 09:04:13 jloup Exp $";
+#endif
+
+#include <ctype.h>
+#include <errno.h>
+#include <sys/types.h>
+#include <setjmp.h>
+
+extern char	*global_buf;
+extern int	global_count;
+extern jmp_buf	global_env;
+extern int	global_val;
+
+/* #include "tailor.h" */
+#define PATH_SEP '/'
+#define casemap(c) (c)
+
+#ifdef HAVE_UNISTD_H
+#  include <unistd.h>
+#endif
+#ifndef NO_FCNTL_H
+#  include <fcntl.h>
+#endif
+
+#if defined(STDC_HEADERS) || !defined(NO_STDLIB_H)
+#  include <stdlib.h>
+#else
+   extern int errno;
+#endif
+
+#include "gzip.h"
+/* #include "crypt.h" */
+
+extern ulg crc_32_tab[];   /* crc table, defined below */
+
+/* ===========================================================================
+ * Copy input to output unchanged: zcat == cat with --force.
+ * IN assertion: insize bytes have already been read in inbuf.
+ */
+int copy(in, out)
+    int in, out;   /* input and output file descriptors */
+{
+    errno = 0;
+    while (insize != 0 && (int)insize != EOF) {
+	write_buf(out, (char*)inbuf, insize);
+	bytes_out += insize;
+	insize = read(in, (char*)inbuf, INBUFSIZ);
+    }
+    if ((int)insize == EOF && errno != 0) {
+	read_error();
+    }
+    bytes_in = bytes_out;
+    return OK;
+}
+
+/* ===========================================================================
+ * Run a set of bytes through the crc shift register.  If s is a NULL
+ * pointer, then initialize the crc shift register contents instead.
+ * Return the current crc in either case.
+ */
+ulg updcrc(s, n)
+    uch *s;                 /* pointer to bytes to pump through */
+    unsigned n;             /* number of bytes in s[] */
+{
+    register ulg c;         /* temporary variable */
+
+    static ulg crc = (ulg)0xffffffffL; /* shift register contents */
+
+    if (s == NULL) {
+	c = 0xffffffffL;
+    } else {
+	c = crc;
+        if (n) do {
+            c = crc_32_tab[((int)c ^ (*s++)) & 0xff] ^ (c >> 8);
+        } while (--n);
+    }
+    crc = c;
+    return c ^ 0xffffffffL;       /* (instead of ~c for 64-bit machines) */
+}
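
A minimal usage sketch for the routine above (illustrative only, not part of
the upstream file; the names data and data_len are placeholders): passing a
NULL pointer resets the shift register, and each later call folds more bytes
into the running CRC-32.

    ulg crc;
    updcrc((uch *)NULL, 0);          /* reset the shift register */
    crc = updcrc(data, data_len);    /* CRC-32 of data[0..data_len-1];    */
                                     /* call again to accumulate chunks   */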
+
+/* ===========================================================================
+ * Clear input and output buffers
+ */
+void clear_bufs()
+{
+    outcnt = 0;
+    insize = inptr = 0;
+    bytes_in = bytes_out = 0L;
+}
+
+/* ===========================================================================
+ * Fill the input buffer. This is called only when the buffer is empty.
+ */
+int fill_inbuf(eof_ok)
+    int eof_ok;          /* set if EOF acceptable as a result */
+{
+    int len;
+
+    /* Read as much as possible */
+    insize = 0;
+    errno = 0;
+    do {
+	len = read(ifd, (char*)inbuf+insize, INBUFSIZ-insize);
+        if (len == 0 || len == EOF) break;
+	insize += len;
+    } while (insize < INBUFSIZ);
+
+    if (insize == 0) {
+	if (eof_ok) return EOF;
+	read_error();
+    }
+    bytes_in += (ulg)insize;
+    inptr = 1;
+    return inbuf[0];
+}
+
+/* ===========================================================================
+ * Write the output buffer outbuf[0..outcnt-1] and update bytes_out.
+ * (used for the compressed data only)
+ */
+void flush_outbuf()
+{
+    if (outcnt == 0) return;
+
+    write_buf(ofd, (char *)outbuf, outcnt);
+    bytes_out += (ulg)outcnt;
+    outcnt = 0;
+}
+
+/* ===========================================================================
+ * Write the output window window[0..outcnt-1] and update crc and bytes_out.
+ * (Used for the decompressed data only.)
+ */
+void flush_window()
+{
+    if (outcnt == 0) {
+	global_count = 0;
+    	return;
+    }
+    updcrc(window, outcnt);
+
+    if (!test) {
+/*	write_buf(ofd, (char *)window, outcnt); */
+    }
+    if (global_count > outcnt)  {
+	memcpy(global_buf, (char *)window, outcnt);
+	bytes_out += (ulg)outcnt;
+	global_count = outcnt;
+	outcnt = 0;
+    }
+    else  {
+	memcpy(global_buf, (char *)window, global_count);
+	bytes_out += (ulg)global_count;
+	outcnt -= global_count;
+    }
+    longjmp(global_env, 1);
+}
+
+/* ===========================================================================
+ * Does the same as write(), but also handles partial pipe writes and checks
+ * for error return.
+ */
+void write_buf(fd, buf, cnt)
+    int       fd;
+    voidp     buf;
+    unsigned  cnt;
+{
+    unsigned  n;
+
+    while ((n = write(fd, buf, cnt)) != cnt) {
+	if (n == (unsigned)(-1)) {
+	    write_error();
+	}
+	cnt -= n;
+	buf = (voidp)((char*)buf+n);
+    }
+}
+
+/* ========================================================================
+ * Put string s in lower case, return s.
+ */
+char *strlwr(s)
+    char *s;
+{
+    char *t;
+    for (t = s; *t; t++) *t = tolow(*t);
+    return s;
+}
+
+/* ========================================================================
+ * Return the base name of a file (remove any directory prefix and
+ * any version suffix). For systems with file names that are not
+ * case sensitive, force the base name to lower case.
+ */
+char *local_basename(fname)
+    char *fname;
+{
+    char *p;
+
+    if ((p = strrchr(fname, PATH_SEP))  != NULL) fname = p+1;
+#ifdef PATH_SEP2
+    if ((p = strrchr(fname, PATH_SEP2)) != NULL) fname = p+1;
+#endif
+#ifdef PATH_SEP3
+    if ((p = strrchr(fname, PATH_SEP3)) != NULL) fname = p+1;
+#endif
+#ifdef SUFFIX_SEP
+    if ((p = strrchr(fname, SUFFIX_SEP)) != NULL) *p = '\0';
+#endif
+    if (casemap('A') == 'a') strlwr(fname);
+    return fname;
+}
+
+/* ========================================================================
+ * Make a file name legal for file systems not allowing file names with
+ * multiple dots or starting with a dot (such as MSDOS), by changing
+ * all dots except the last one into underlines.  A target dependent
+ * function can be used instead of this simple function by defining the macro
+ * MAKE_LEGAL_NAME in tailor.h and providing the function in a target
+ * dependent module.
+ */
+void make_simple_name(name)
+    char *name;
+{
+    char *p = strrchr(name, '.');
+    if (p == NULL) return;
+    if (p == name) p++;
+    do {
+        if (*--p == '.') *p = '_';
+    } while (p != name);
+}
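
For illustration (a sketch, not upstream code), the routine above rewrites a
name in place, turning every dot except the last one into an underscore; the
file name below is a made-up example:

    char fname[] = "topo.1.dem.gz";
    make_simple_name(fname);         /* fname becomes "topo_1_dem.gz" */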
+
+
+#if defined(NO_STRING_H) && !defined(STDC_HEADERS)
+
+/* Provide missing strspn and strcspn functions. */
+
+#  ifndef __STDC__
+#    define const
+#  endif
+
+int strspn  OF((const char *s, const char *accept));
+int strcspn OF((const char *s, const char *reject));
+
+/* ========================================================================
+ * Return the length of the maximum initial segment
+ * of s which contains only characters in accept.
+ */
+int strspn(s, accept)
+    const char *s;
+    const char *accept;
+{
+    register const char *p;
+    register const char *a;
+    register int count = 0;
+
+    for (p = s; *p != '\0'; ++p) {
+	for (a = accept; *a != '\0'; ++a) {
+	    if (*p == *a) break;
+	}
+	if (*a == '\0') return count;
+	++count;
+    }
+    return count;
+}
+
+/* ========================================================================
+ * Return the length of the maximum initial segment of s
+ * which contains no characters from reject.
+ */
+int strcspn(s, reject)
+    const char *s;
+    const char *reject;
+{
+    register int count = 0;
+
+    while (*s != '\0') {
+	if (strchr(reject, *s++) != NULL) return count;
+	++count;
+    }
+    return count;
+}
+
+#endif /* NO_STRING_H */
+
+/* ========================================================================
+ * Add an environment variable (if any) before argv, and update argc.
+ * Return the expanded environment variable to be freed later, or NULL 
+ * if no options were added to argv.
+ */
+#define SEPARATOR	" \t"	/* separators in env variable */
+
+char *add_envopt(argcp, argvp, env)
+    int *argcp;          /* pointer to argc */
+    char ***argvp;       /* pointer to argv */
+    char *env;           /* name of environment variable */
+{
+    char *p;             /* running pointer through env variable */
+    char **oargv;        /* runs through old argv array */
+    char **nargv;        /* runs through new argv array */
+    int	 oargc = *argcp; /* old argc */
+    int  nargc = 0;      /* number of arguments in env variable */
+
+    env = (char*)getenv(env);
+    if (env == NULL) return NULL;
+
+    p = (char*)xmalloc(strlen(env)+1);
+    env = strcpy(p, env);                    /* keep env variable intact */
+
+    for (p = env; *p; nargc++ ) {            /* move through env */
+	p += strspn(p, SEPARATOR);	     /* skip leading separators */
+	if (*p == '\0') break;
+
+	p += strcspn(p, SEPARATOR);	     /* find end of word */
+	if (*p) *p++ = '\0';		     /* mark it */
+    }
+    if (nargc == 0) {
+	free(env);
+	return NULL;
+    }
+    *argcp += nargc;
+    /* Allocate the new argv array, with an extra element just in case
+     * the original arg list did not end with a NULL.
+     */
+    nargv = (char**)calloc(*argcp+1, sizeof(char *));
+    if (nargv == NULL) error("out of memory");
+    oargv  = *argvp;
+    *argvp = nargv;
+
+    /* Copy the program name first */
+    if (oargc-- < 0) error("argc<=0");
+    *(nargv++) = *(oargv++);
+
+    /* Then copy the environment args */
+    for (p = env; nargc > 0; nargc--) {
+	p += strspn(p, SEPARATOR);	     /* skip separators */
+	*(nargv++) = p;			     /* store start */
+	while (*p++) ;			     /* skip over word */
+    }
+
+    /* Finally copy the old args and add a NULL (usual convention) */
+    while (oargc--) *(nargv++) = *(oargv++);
+    *nargv = NULL;
+    return env;
+}
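
A hedged sketch of how add_envopt() is conventionally driven (the variable
name "GZIP" and the name env_opts are assumptions for the illustration, not
taken from this file):

    char *env_opts;
    env_opts = add_envopt(&argc, &argv, "GZIP");  /* may replace argv */
    /* ... parse options from the (possibly extended) argv ... */
    /* env_opts points at storage that argv now references; free it only
       once argv is no longer needed. */
    if (env_opts != NULL) free(env_opts);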
+
+/* ========================================================================
+ * Error handlers.
+ */
+void error(m)
+    char *m;
+{
+    fprintf(stderr, "\n%s: %s: %s\n", progname, ifname, m);
+    abort_gzip();
+}
+
+void warn(a, b)
+    char *a, *b;            /* message strings juxtaposed in output */
+{
+    WARN((stderr, "%s: %s: warning: %s%s\n", progname, ifname, a, b));
+}
+
+void read_error()
+{
+/*    fprintf(stderr, "\n%s: ", progname); */
+    if (errno != 0) {
+	perror(ifname);
+    } else {
+	fprintf(stderr, "unexpected end of file\n");
+    }
+    abort_gzip();
+}
+
+void write_error()
+{
+    fprintf(stderr, "\n%s: ", progname);
+    perror(ofname);
+    abort_gzip();
+}
+
+/* ========================================================================
+ * Display compression ratio on the given stream on 6 characters.
+ */
+void display_ratio(num, den, file)
+    long num;
+    long den;
+    FILE *file;
+{
+    long ratio;  /* 1000 times the compression ratio */
+
+    if (den == 0) {
+	ratio = 0; /* no compression */
+    } else if (den < 2147483L) { /* (2**31 -1)/1000 */
+	ratio = 1000L*num/den;
+    } else {
+	ratio = num/(den/1000L);
+    }
+    if (ratio < 0) {
+	putc('-', file);
+	ratio = -ratio;
+    } else {
+	putc(' ', file);
+    }
+    fprintf(file, "%2ld.%1ld%%", ratio / 10L, ratio % 10L);
+}
+
+
+/* ========================================================================
+ * Semi-safe malloc -- never returns NULL.
+ */
+voidp xmalloc (size)
+    unsigned size;
+{
+    voidp cp = (voidp)malloc (size);
+
+    if (cp == NULL) error("out of memory");
+    return cp;
+}
+
+/* ========================================================================
+ * Table of CRC-32's of all single-byte values (made by makecrc.c)
+ */
+ulg crc_32_tab[] = {
+  0x00000000L, 0x77073096L, 0xee0e612cL, 0x990951baL, 0x076dc419L,
+  0x706af48fL, 0xe963a535L, 0x9e6495a3L, 0x0edb8832L, 0x79dcb8a4L,
+  0xe0d5e91eL, 0x97d2d988L, 0x09b64c2bL, 0x7eb17cbdL, 0xe7b82d07L,
+  0x90bf1d91L, 0x1db71064L, 0x6ab020f2L, 0xf3b97148L, 0x84be41deL,
+  0x1adad47dL, 0x6ddde4ebL, 0xf4d4b551L, 0x83d385c7L, 0x136c9856L,
+  0x646ba8c0L, 0xfd62f97aL, 0x8a65c9ecL, 0x14015c4fL, 0x63066cd9L,
+  0xfa0f3d63L, 0x8d080df5L, 0x3b6e20c8L, 0x4c69105eL, 0xd56041e4L,
+  0xa2677172L, 0x3c03e4d1L, 0x4b04d447L, 0xd20d85fdL, 0xa50ab56bL,
+  0x35b5a8faL, 0x42b2986cL, 0xdbbbc9d6L, 0xacbcf940L, 0x32d86ce3L,
+  0x45df5c75L, 0xdcd60dcfL, 0xabd13d59L, 0x26d930acL, 0x51de003aL,
+  0xc8d75180L, 0xbfd06116L, 0x21b4f4b5L, 0x56b3c423L, 0xcfba9599L,
+  0xb8bda50fL, 0x2802b89eL, 0x5f058808L, 0xc60cd9b2L, 0xb10be924L,
+  0x2f6f7c87L, 0x58684c11L, 0xc1611dabL, 0xb6662d3dL, 0x76dc4190L,
+  0x01db7106L, 0x98d220bcL, 0xefd5102aL, 0x71b18589L, 0x06b6b51fL,
+  0x9fbfe4a5L, 0xe8b8d433L, 0x7807c9a2L, 0x0f00f934L, 0x9609a88eL,
+  0xe10e9818L, 0x7f6a0dbbL, 0x086d3d2dL, 0x91646c97L, 0xe6635c01L,
+  0x6b6b51f4L, 0x1c6c6162L, 0x856530d8L, 0xf262004eL, 0x6c0695edL,
+  0x1b01a57bL, 0x8208f4c1L, 0xf50fc457L, 0x65b0d9c6L, 0x12b7e950L,
+  0x8bbeb8eaL, 0xfcb9887cL, 0x62dd1ddfL, 0x15da2d49L, 0x8cd37cf3L,
+  0xfbd44c65L, 0x4db26158L, 0x3ab551ceL, 0xa3bc0074L, 0xd4bb30e2L,
+  0x4adfa541L, 0x3dd895d7L, 0xa4d1c46dL, 0xd3d6f4fbL, 0x4369e96aL,
+  0x346ed9fcL, 0xad678846L, 0xda60b8d0L, 0x44042d73L, 0x33031de5L,
+  0xaa0a4c5fL, 0xdd0d7cc9L, 0x5005713cL, 0x270241aaL, 0xbe0b1010L,
+  0xc90c2086L, 0x5768b525L, 0x206f85b3L, 0xb966d409L, 0xce61e49fL,
+  0x5edef90eL, 0x29d9c998L, 0xb0d09822L, 0xc7d7a8b4L, 0x59b33d17L,
+  0x2eb40d81L, 0xb7bd5c3bL, 0xc0ba6cadL, 0xedb88320L, 0x9abfb3b6L,
+  0x03b6e20cL, 0x74b1d29aL, 0xead54739L, 0x9dd277afL, 0x04db2615L,
+  0x73dc1683L, 0xe3630b12L, 0x94643b84L, 0x0d6d6a3eL, 0x7a6a5aa8L,
+  0xe40ecf0bL, 0x9309ff9dL, 0x0a00ae27L, 0x7d079eb1L, 0xf00f9344L,
+  0x8708a3d2L, 0x1e01f268L, 0x6906c2feL, 0xf762575dL, 0x806567cbL,
+  0x196c3671L, 0x6e6b06e7L, 0xfed41b76L, 0x89d32be0L, 0x10da7a5aL,
+  0x67dd4accL, 0xf9b9df6fL, 0x8ebeeff9L, 0x17b7be43L, 0x60b08ed5L,
+  0xd6d6a3e8L, 0xa1d1937eL, 0x38d8c2c4L, 0x4fdff252L, 0xd1bb67f1L,
+  0xa6bc5767L, 0x3fb506ddL, 0x48b2364bL, 0xd80d2bdaL, 0xaf0a1b4cL,
+  0x36034af6L, 0x41047a60L, 0xdf60efc3L, 0xa867df55L, 0x316e8eefL,
+  0x4669be79L, 0xcb61b38cL, 0xbc66831aL, 0x256fd2a0L, 0x5268e236L,
+  0xcc0c7795L, 0xbb0b4703L, 0x220216b9L, 0x5505262fL, 0xc5ba3bbeL,
+  0xb2bd0b28L, 0x2bb45a92L, 0x5cb36a04L, 0xc2d7ffa7L, 0xb5d0cf31L,
+  0x2cd99e8bL, 0x5bdeae1dL, 0x9b64c2b0L, 0xec63f226L, 0x756aa39cL,
+  0x026d930aL, 0x9c0906a9L, 0xeb0e363fL, 0x72076785L, 0x05005713L,
+  0x95bf4a82L, 0xe2b87a14L, 0x7bb12baeL, 0x0cb61b38L, 0x92d28e9bL,
+  0xe5d5be0dL, 0x7cdcefb7L, 0x0bdbdf21L, 0x86d3d2d4L, 0xf1d4e242L,
+  0x68ddb3f8L, 0x1fda836eL, 0x81be16cdL, 0xf6b9265bL, 0x6fb077e1L,
+  0x18b74777L, 0x88085ae6L, 0xff0f6a70L, 0x66063bcaL, 0x11010b5cL,
+  0x8f659effL, 0xf862ae69L, 0x616bffd3L, 0x166ccf45L, 0xa00ae278L,
+  0xd70dd2eeL, 0x4e048354L, 0x3903b3c2L, 0xa7672661L, 0xd06016f7L,
+  0x4969474dL, 0x3e6e77dbL, 0xaed16a4aL, 0xd9d65adcL, 0x40df0b66L,
+  0x37d83bf0L, 0xa9bcae53L, 0xdebb9ec5L, 0x47b2cf7fL, 0x30b5ffe9L,
+  0xbdbdf21cL, 0xcabac28aL, 0x53b39330L, 0x24b4a3a6L, 0xbad03605L,
+  0xcdd70693L, 0x54de5729L, 0x23d967bfL, 0xb3667a2eL, 0xc4614ab8L,
+  0x5d681b02L, 0x2a6f2b94L, 0xb40bbe37L, 0xc30c8ea1L, 0x5a05df1bL,
+  0x2d02ef8dL
+};
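
The z_control labels and the longjmp() in flush_window() above turn inflate()
into a resumable routine that hands decompressed bytes to a caller through
global_buf.  Below is a minimal driver sketch, under the assumption that the
real caller elsewhere in drawmap (not shown here) works along these lines;
the function name read_inflated() and its error convention are invented for
the illustration, while the extern declarations match the ones used above.

    #include <setjmp.h>

    extern char    *global_buf;
    extern int     global_count;
    extern jmp_buf global_env;
    extern int     inflate();

    /* Ask the decompressor for up to `want' bytes in dst; return the number
     * of bytes delivered (0 at end of stream), or -1 on error. */
    static int read_inflated(dst, want)
        char *dst;
        int want;
    {
        global_buf = dst;
        global_count = want;
        if (setjmp(global_env) == 0) {
            /* Start, or resume via z_control, the decompressor;
             * flush_window() longjmps back here once dst has been filled. */
            if (inflate() != 0)
                return -1;
        }
        return global_count;   /* flush_window() leaves the delivered count here */
    }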

Added: packages/drawmap/branches/upstream/current/gzip.h
===================================================================
--- packages/drawmap/branches/upstream/current/gzip.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/gzip.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,325 @@
+/* gzip.h -- common declarations for all gzip modules
+ * Copyright (C) 1992-1993 Jean-loup Gailly.
+ * This is free software; you can redistribute it and/or modify it under the
+ * terms of the GNU General Public License, see the file COPYING.
+ *
+ * This file also contains some simple modifications by Fred Erickson
+ * to allow it to be incorporated into the "drawmap" program.
+ * Copyright (c) 1998  Fred M. Erickson
+ * The modifications are also distributed under the terms of the
+ * GNU General Public License.
+ */
+
+#ifndef GZIP_H
+#define GZIP_H
+
+#if defined(__STDC__) || defined(PROTO)
+#  define OF(args)  args
+#else
+#  define OF(args)  ()
+#endif
+
+#ifdef __STDC__
+   typedef void *voidp;
+#else
+   typedef char *voidp;
+#endif
+
+/* I don't like nested includes, but the string and io functions are used
+ * too often
+ */
+#include <stdio.h>
+#if !defined(NO_STRING_H) || defined(STDC_HEADERS)
+#  include <string.h>
+#  if !defined(STDC_HEADERS) && !defined(NO_MEMORY_H) && !defined(__GNUC__)
+#    include <memory.h>
+#  endif
+#  define memzero(s, n)     memset ((voidp)(s), 0, (n))
+#else
+#  include <strings.h>
+#  define strchr            index 
+#  define strrchr           rindex
+#  define memcpy(d, s, n)   bcopy((s), (d), (n)) 
+#  define memcmp(s1, s2, n) bcmp((s1), (s2), (n)) 
+#  define memzero(s, n)     bzero((s), (n))
+#endif
+
+#ifndef RETSIGTYPE
+#  define RETSIGTYPE void
+#endif
+
+#define local static
+
+typedef unsigned char  uch;
+typedef unsigned short ush;
+typedef unsigned long  ulg;
+
+/* Return codes from gzip */
+#define OK      0
+#define ERROR   1
+#define WARNING 2
+
+/* Compression methods (see algorithm.doc) */
+#define STORED      0
+#define COMPRESSED  1
+#define PACKED      2
+#define LZHED       3
+/* methods 4 to 7 reserved */
+#define DEFLATED    8
+#define MAX_METHODS 9
+extern int method;         /* compression method */
+
+/* To save memory for 16 bit systems, some arrays are overlaid between
+ * the various modules:
+ * deflate:  prev+head   window      d_buf  l_buf  outbuf
+ * unlzw:    tab_prefix  tab_suffix  stack  inbuf  outbuf
+ * inflate:              window             inbuf
+ * unpack:               window             inbuf  prefix_len
+ * unlzh:    left+right  window      c_table inbuf c_len
+ * For compression, input is done in window[]. For decompression, output
+ * is done in window except for unlzw.
+ */
+
+#ifndef	INBUFSIZ
+#  ifdef SMALL_MEM
+#    define INBUFSIZ  0x2000  /* input buffer size */
+#  else
+#    define INBUFSIZ  0x8000  /* input buffer size */
+#  endif
+#endif
+#define INBUF_EXTRA  64     /* required by unlzw() */
+
+#ifndef	OUTBUFSIZ
+#  ifdef SMALL_MEM
+#    define OUTBUFSIZ   8192  /* output buffer size */
+#  else
+#    define OUTBUFSIZ  16384  /* output buffer size */
+#  endif
+#endif
+#define OUTBUF_EXTRA 2048   /* required by unlzw() */
+
+#ifndef DIST_BUFSIZE
+#  ifdef SMALL_MEM
+#    define DIST_BUFSIZE 0x2000 /* buffer for distances, see trees.c */
+#  else
+#    define DIST_BUFSIZE 0x8000 /* buffer for distances, see trees.c */
+#  endif
+#endif
+
+#ifdef DYN_ALLOC
+#  define EXTERN(type, array)  extern type * near array
+#  define DECLARE(type, array, size)  type * near array
+#  define ALLOC(type, array, size) { \
+      array = (type*)fcalloc((size_t)(((size)+1L)/2), 2*sizeof(type)); \
+      if (array == NULL) error("insufficient memory"); \
+   }
+#  define FREE(array) {if (array != NULL) fcfree(array), array=NULL;}
+#else
+#  define EXTERN(type, array)  extern type array[]
+#  define DECLARE(type, array, size)  type array[size]
+#  define ALLOC(type, array, size)
+#  define FREE(array)
+#endif
+
+EXTERN(uch, inbuf);          /* input buffer */
+EXTERN(uch, outbuf);         /* output buffer */
+EXTERN(ush, d_buf);          /* buffer for distances, see trees.c */
+EXTERN(uch, window);         /* Sliding window and suffix table (unlzw) */
+#define tab_suffix window
+#ifndef MAXSEG_64K
+#  define tab_prefix prev    /* hash link (see deflate.c) */
+#  define head (prev+WSIZE)  /* hash head (see deflate.c) */
+   EXTERN(ush, tab_prefix);  /* prefix code (see unlzw.c) */
+#else
+#  define tab_prefix0 prev
+#  define head tab_prefix1
+   EXTERN(ush, tab_prefix0); /* prefix for even codes */
+   EXTERN(ush, tab_prefix1); /* prefix for odd  codes */
+#endif
+
+extern unsigned insize; /* valid bytes in inbuf */
+extern unsigned inptr;  /* index of next byte to be processed in inbuf */
+extern unsigned outcnt; /* bytes in output buffer */
+
+extern long bytes_in;   /* number of input bytes */
+extern long bytes_out;  /* number of output bytes */
+extern long header_bytes;/* number of bytes in gzip header */
+
+#define isize bytes_in
+/* for compatibility with old zip sources (to be cleaned) */
+
+extern int  ifd;        /* input file descriptor */
+extern int  ofd;        /* output file descriptor */
+extern char ifname[];   /* input file name or "stdin" */
+extern char ofname[];   /* output file name or "stdout" */
+extern char *progname;  /* program name */
+
+extern long time_stamp; /* original time stamp (modification time) */
+extern long ifile_size; /* input file size, -1 for devices (debug only) */
+
+typedef int file_t;     /* Do not use stdio */
+#define NO_FILE  (-1)   /* in memory compression */
+
+
+#define	PACK_MAGIC     "\037\036" /* Magic header for packed files */
+#define	GZIP_MAGIC     "\037\213" /* Magic header for gzip files, 1F 8B */
+#define	OLD_GZIP_MAGIC "\037\236" /* Magic header for gzip 0.5 = freeze 1.x */
+#define	LZH_MAGIC      "\037\240" /* Magic header for SCO LZH Compress files*/
+#define PKZIP_MAGIC    "\120\113\003\004" /* Magic header for pkzip files */
+
+/* gzip flag byte */
+#define ASCII_FLAG   0x01 /* bit 0 set: file probably ascii text */
+#define CONTINUATION 0x02 /* bit 1 set: continuation of multi-part gzip file */
+#define EXTRA_FIELD  0x04 /* bit 2 set: extra field present */
+#define ORIG_NAME    0x08 /* bit 3 set: original file name present */
+#define COMMENT      0x10 /* bit 4 set: file comment present */
+#define ENCRYPTED    0x20 /* bit 5 set: file is encrypted */
+#define RESERVED     0xC0 /* bit 6,7:   reserved */
+
+/* internal file attribute */
+#define UNKNOWN 0xffff
+#define BINARY  0
+#define ASCII   1
+
+#ifndef WSIZE
+#  define WSIZE 0x8000     /* window size--must be a power of two, and */
+#endif                     /*  at least 32K for zip's deflate method */
+
+#define MIN_MATCH  3
+#define MAX_MATCH  258
+/* The minimum and maximum match lengths */
+
+#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
+/* Minimum amount of lookahead, except at the end of the input file.
+ * See deflate.c for comments about the MIN_MATCH+1.
+ */
+
+#define MAX_DIST  (WSIZE-MIN_LOOKAHEAD)
+/* In order to simplify the code, particularly on 16 bit machines, match
+ * distances are limited to MAX_DIST instead of WSIZE.
+ */
+
+extern int decrypt;        /* flag to turn on decryption */
+extern int exit_code;      /* program exit code */
+extern int verbose;        /* be verbose (-v) */
+extern int quiet;          /* be quiet (-q) */
+extern int level;          /* compression level */
+extern int test;           /* check .z file integrity */
+extern int to_stdout;      /* output to stdout (-c) */
+extern int save_orig_name; /* set if original name must be saved */
+
+#define get_byte()  (inptr < insize ? inbuf[inptr++] : fill_inbuf(0))
+#define try_byte()  (inptr < insize ? inbuf[inptr++] : fill_inbuf(1))
+
+/* put_byte is used for the compressed output, put_ubyte for the
+ * uncompressed output. However unlzw() uses window for its
+ * suffix table instead of its output buffer, so it does not use put_ubyte
+ * (to be cleaned up).
+ */
+#define put_byte(c) {outbuf[outcnt++]=(uch)(c); if (outcnt==OUTBUFSIZ)\
+   flush_outbuf();}
+#define put_ubyte(c) {window[outcnt++]=(uch)(c); if (outcnt==WSIZE)\
+   flush_window();}
+
+/* Output a 16 bit value, lsb first */
+#define put_short(w) \
+{ if (outcnt < OUTBUFSIZ-2) { \
+    outbuf[outcnt++] = (uch) ((w) & 0xff); \
+    outbuf[outcnt++] = (uch) ((ush)(w) >> 8); \
+  } else { \
+    put_byte((uch)((w) & 0xff)); \
+    put_byte((uch)((ush)(w) >> 8)); \
+  } \
+}
+
+/* Output a 32 bit value to the bit stream, lsb first */
+#define put_long(n) { \
+    put_short((n) & 0xffff); \
+    put_short(((ulg)(n)) >> 16); \
+}
+
+#define seekable()    0  /* force sequential output */
+#define translate_eol 0  /* no option -a yet */
+
+#define tolow(c)  (isupper(c) ? (c)-'A'+'a' : (c))    /* force to lower case */
+
+/* Macros for getting two-byte and four-byte header values */
+#define SH(p) ((ush)(uch)((p)[0]) | ((ush)(uch)((p)[1]) << 8))
+#define LG(p) ((ulg)(SH(p)) | ((ulg)(SH((p)+2)) << 16))
+
+/* Diagnostic functions */
+#ifdef DEBUG
+#  define Assert(cond,msg) {if(!(cond)) error(msg);}
+#  define Trace(x) fprintf x
+#  define Tracev(x) {if (verbose) fprintf x ;}
+#  define Tracevv(x) {if (verbose>1) fprintf x ;}
+#  define Tracec(c,x) {if (verbose && (c)) fprintf x ;}
+#  define Tracecv(c,x) {if (verbose>1 && (c)) fprintf x ;}
+#else
+#  define Assert(cond,msg)
+#  define Trace(x)
+#  define Tracev(x)
+#  define Tracevv(x)
+#  define Tracec(c,x)
+#  define Tracecv(c,x)
+#endif
+
+#define WARN(msg) {if (!quiet) fprintf msg ; \
+		   if (exit_code == OK) exit_code = WARNING;}
+
+	/* in zip.c: */
+extern int zip        OF((int in, int out));
+extern int file_read  OF((char *buf,  unsigned size));
+
+	/* in unzip.c */
+extern int unzip      OF((int in, int out));
+extern int check_zipfile OF((int in));
+
+	/* in unpack.c */
+extern int unpack     OF((int in, int out));
+
+	/* in unlzh.c */
+extern int unlzh      OF((int in, int out));
+
+	/* in gzip.c */
+RETSIGTYPE abort_gzip OF((void));
+
+        /* in deflate.c */
+void lm_init OF((int pack_level, ush *flags));
+ulg  deflate OF((void));
+
+        /* in trees.c */
+void ct_init     OF((ush *attr, int *method));
+int  ct_tally    OF((int dist, int lc));
+ulg  flush_block OF((char *buf, ulg stored_len, int eof));
+
+        /* in bits.c */
+void     bi_init    OF((file_t zipfile));
+void     send_bits  OF((int value, int length));
+unsigned bi_reverse OF((unsigned value, int length));
+void     bi_windup  OF((void));
+void     copy_block OF((char *buf, unsigned len, int header));
+extern   int (*read_buf) OF((char *buf, unsigned size));
+
+	/* in util.c: */
+extern int copy           OF((int in, int out));
+extern ulg  updcrc        OF((uch *s, unsigned n));
+extern void clear_bufs    OF((void));
+extern int  fill_inbuf    OF((int eof_ok));
+extern void flush_outbuf  OF((void));
+extern void flush_window  OF((void));
+extern void write_buf     OF((int fd, voidp buf, unsigned cnt));
+extern char *strlwr       OF((char *s));
+extern char *local_basename     OF((char *fname));
+extern void make_simple_name OF((char *name));
+extern char *add_envopt   OF((int *argcp, char ***argvp, char *env));
+extern void error         OF((char *m));
+extern void warn          OF((char *a, char *b));
+extern void read_error    OF((void));
+extern void write_error   OF((void));
+extern void display_ratio OF((long num, long den, FILE *file));
+extern voidp xmalloc      OF((unsigned int size));
+
+	/* in inflate.c */
+extern int inflate OF((void));
+#endif
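
As a quick check on the header-value macros defined above (the byte values
are illustrative only): SH() and LG() assemble little-endian fields from a
byte pointer.

    uch p[4] = { 0x78, 0x56, 0x34, 0x12 };
    ush s = SH(p);     /* 0x5678: bytes assembled least-significant first */
    ulg l = LG(p);     /* 0x12345678 */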

Added: packages/drawmap/branches/upstream/current/ll2utm.1n
===================================================================
--- packages/drawmap/branches/upstream/current/ll2utm.1n	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/ll2utm.1n	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,70 @@
+.TH LL2UTM 1 "Jul 24, 2001" \" -*- nroff -*-
+.SH NAME
+ll2utm \- Convert latitude/longitude geographical coordinates to UTM coordinates
+.SH SYNOPSIS
+.B ll2utm
+[-L] | [latitude longitude [nad27 | nad83 | wgs84]]
+
+.SH DESCRIPTION
+This program uses Redfearn's formulas to convert a given latitude and longitude
+into the equivalent Universal Transverse Mercator (UTM) coordinates.
+(This operation is often referred to
+as projection, since it projects a curved surface onto a flat plane.)
+The input latitude and longitude must be in decimal degrees.
+Latitudes south of the equator are negative, and longitudes west of the
+prime meridian are negative.
+UTM is intended for use in the latitude range from 80S to 84N.
+The program will accept input outside of this range, but will print
+a warning message.
+.PP
+The output takes the form of a single line, containing the UTM "x" value,
+the UTM "y" value, and the UTM zone, separated by white space.
+The "x" value includes the normal 500,000 false easting.
+The "y" value includes the normal 10,000,000 false northing,
+if the point is in the southern hemisphere.
+Points in the southern hemisphere are flagged by making the
+zone number negative.
+.PP
+If you provide just the "-L" option, the program will print some license
+information and exit.
+.PP
+Projections, and inverse projections, depend on defining an ellipsoid that
+approximates the shape of the earth (the reference ellipsoid) and defining
+reference coordinates (the datum) that allow measurements to be made.
+Different choices of the ellipsoid and datum can yield projections that differ by
+tens of meters.  There are a wide variety of choices, due to both
+the historical progression of measurement technology, and the
+desire to maximize accuracy over a given region (such as North America, or
+one of the United States).
+.PP
+This program defaults to the North American Datum of 1927 (NAD-27) with
+the Clarke Ellipsoid of 1866, since these appear to be appropriate for much
+of the freely-available data.
+The data are apparently in the process of being converted to the Geodetic
+Reference System 1980 (GRS-80) ellipsoid
+and NAD-83.  If you come across such data, you can specify "nad83"
+on the command line.
+The GTOPO30 data use the World Geodetic System 1984 (WGS-84) ellipsoid, which can be invoked by
+specifying "wgs84" on the command line.
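+.PP
+A typical invocation (the coordinates here are only an arbitrary example)
+looks like this:
+.TP
+ll2utm 45.68 -111.04 nad27
+.PP
+which prints the easting, the northing, and the zone on one line, as
+described above.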
+.SH SEE ALSO
+The
+.I utm2ll(1)
+command provides the inverse conversion.
+\" =========================================================================
+\" ll2utm.1 - The manual page for the ll2utm program.
+\" Copyright (c) 2000,2001  Fred M. Erickson
+\"
+\" This program is free software; you can redistribute it and/or modify
+\" it under the terms of the GNU General Public License as published by
+\" the Free Software Foundation; either version 2, or (at your option)
+\" any later version.
+\"
+\" This program is distributed in the hope that it will be useful,
+\" but WITHOUT ANY WARRANTY; without even the implied warranty of
+\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+\" GNU General Public License for more details.
+\"
+\" You should have received a copy of the GNU General Public License
+\" along with this program; if not, write to the Free Software
+\" Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+\" =========================================================================

Added: packages/drawmap/branches/upstream/current/ll2utm.c
===================================================================
--- packages/drawmap/branches/upstream/current/ll2utm.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/ll2utm.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,154 @@
+/*
+ * =========================================================================
+ * ll2utm - A program that converts latitude/longitude coordinates to UTM coordinates
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ *
+ * Program to use Redfearn's formulas to convert latitude/longitude
+ * geographical coordinates to UTM coordinates.
+ *
+ * There aren't a lot of comments in this program because it is
+ * basically a wrapper that calls the appropriate conversion function
+ * in the file utilities.c.  See the comments there for a description
+ * of the conversion process.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>	/* for strcmp() */
+#include <math.h>	/* for floor() */
+#include <sys/types.h>
+#include "drawmap.h"
+
+
+void
+license(void)
+{
+	fprintf(stderr, "This program is free software; you can redistribute it and/or modify\n");
+	fprintf(stderr, "it under the terms of the GNU General Public License as published by\n");
+	fprintf(stderr, "the Free Software Foundation; either version 2, or (at your option)\n");
+	fprintf(stderr, "any later version.\n\n");
+
+	fprintf(stderr, "This program is distributed in the hope that it will be useful,\n");
+	fprintf(stderr, "but WITHOUT ANY WARRANTY; without even the implied warranty of\n");
+	fprintf(stderr, "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n");
+	fprintf(stderr, "GNU General Public License for more details.\n\n");
+
+	fprintf(stderr, "You should have received a copy of the GNU General Public License\n");
+	fprintf(stderr, "along with this program; if not, write to the Free Software\n");
+	fprintf(stderr, "Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n");
+}
+
+int
+main(int argc, char *argv[])
+{
+	double utm_x, utm_y, longitude, latitude;
+	long zone;
+	long dtype;
+	struct datum datum;
+
+	if ((argc != 3) && (argc != 4))  {
+		if ((argc == 2) && (argv[1][0] == '-') && (argv[1][1] == 'L'))  {
+			license();
+			exit(0);
+		}
+		fprintf(stderr, "Converts latitude/longitude coordinates to UTM coordinates.\n");
+		fprintf(stderr, "Usage:  %s latitude longitude [nad27 | nad83 | wgs84]\n", argv[0]);
+		fprintf(stderr, "The default is nad27.\n");
+		exit(0);
+	}
+	latitude = atof(argv[1]);
+	longitude = atof(argv[2]);
+	if (argc == 4)  {
+		if (strcmp(argv[3], "nad27")  == 0)  {
+			dtype = 0;
+		}
+		else if (strcmp(argv[3], "nad83") == 0)  {
+			dtype = 1;
+		}
+		else if (strcmp(argv[3], "wgs84") == 0)  {
+			dtype = 2;
+		}
+		else  {
+			fprintf(stderr, "Unknown datum specified.\n");
+			fprintf(stderr, "Usage:  %s latitude longitude [nad27 | nad83 | wgs84]\n", argv[0]);
+			fprintf(stderr, "Default is nad27.\n");
+			exit(0);
+		}
+	}
+	else  {
+		dtype = 0;
+	}
+
+
+	if (dtype == 0)  {
+		/* Fill in the datum parameters for NAD-27. */
+		datum.a = NAD27_SEMIMAJOR;
+		datum.b = NAD27_SEMIMINOR;
+		datum.e_2 = NAD27_E_SQUARED;
+		datum.f_inv = NAD27_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = NAD27_A0;
+		datum.a2 = NAD27_A2;
+		datum.a4 = NAD27_A4;
+		datum.a6 = NAD27_A6;
+	}
+	else if (dtype == 1)  {
+		/* Fill in the datum parameters for NAD-83. */
+		datum.a = NAD83_SEMIMAJOR;
+		datum.b = NAD83_SEMIMINOR;
+		datum.e_2 = NAD83_E_SQUARED;
+		datum.f_inv = NAD83_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = NAD83_A0;
+		datum.a2 = NAD83_A2;
+		datum.a4 = NAD83_A4;
+		datum.a6 = NAD83_A6;
+	}
+	else  {
+		/* Fill in the datum parameters for WGS-84. */
+		datum.a = WGS84_SEMIMAJOR;
+		datum.b = WGS84_SEMIMINOR;
+		datum.e_2 = WGS84_E_SQUARED;
+		datum.f_inv = WGS84_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = WGS84_A0;
+		datum.a2 = WGS84_A2;
+		datum.a4 = WGS84_A4;
+		datum.a6 = WGS84_A6;
+	}
+
+
+	if (redfearn(&datum, &utm_x, &utm_y, &zone, latitude, longitude, 0) != 0)  {
+		fprintf(stderr, "error in input parameters.\n");
+		exit(0);
+	}
+
+	if ((latitude > 84.0) || (latitude < -80.0))  {
+		fprintf(stderr, "Warning:  Given latitude is outside valid range of [-80,84] for UTM projections.\n");
+	}
+
+/*	fprintf(stdout, "(%.10g %.10g %d) ===> (%.10g %.10g %d)\n", latitude, longitude, east_west, utm_x, utm_y, zone); */
+	fprintf(stdout, "%.10g %.10g %ld\n", utm_x, utm_y, zone);
+
+	if (longitude - 6.0 * floor(longitude / 6.0) == 0.0)  {
+		if ((longitude != 180.0) && (longitude != -180.0))  {
+			fprintf(stderr, "The longitude falls on a UTM zone boundary.\n");
+			fprintf(stderr, "It was arbitrarily placed in the more-western zone.\n");
+		}
+	}
+
+	exit(0);
+}

Added: packages/drawmap/branches/upstream/current/llsearch.1n
===================================================================
--- packages/drawmap/branches/upstream/current/llsearch.1n	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/llsearch.1n	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,80 @@
+.TH LLSEARCH 1 "Jul 24, 2001" \" -*- nroff -*-
+.SH NAME
+llsearch \- Search a GNIS file for place names within a given block of latitude/longitude
+.SH SYNOPSIS
+.B llsearch
+[-L] | [latitude_low longitude_low latitude_high longitude_high]
+
+.SH DESCRIPTION
+The U.S. Geological Survey supports sites on the Internet with
+Geographic Names Information System (GNIS) files.
+These files contain lists of place names, complete with their latitude/longitude and other information.
+There are separate files for each of the U.S. states, and each file contains many, many, many place names.
+If you want to use this data with
+.I drawmap,
+it is useful to reduce the data to only the items that you need.
+.I Llsearch
+lets you filter a GNIS file and winnow out only those place names that fall within
+the latitude/longitude boundaries that you specify.
+(You may want to specify boundaries that are a tiny bit larger than what you are
+interested in, so that numerical quantization doesn't eliminate locales that fall
+exactly on your boundaries.)
+.PP
+Latitudes and longitudes are positive for north latitude and east longitude, and negative
+for south latitude and west longitude.
+.I Llsearch
+expects you to enter them in decimal degrees.
+(The latitudes and longitudes in the GNIS file are in degrees-minutes-seconds format, followed
+by 'N', 'S', 'E', or 'W'.  However, there are two available file formats, and one of the formats
+also contains the latitudes/longitudes in decimal degrees.)
+Typical usage is as follows:
+.TP
+gunzip -c california.gz | llsearch 33 -118 34 -117 > gnis_santa_ana_west
+.PP
+If you enter the "-L" option, the program will print some license information and exit.
+.PP
+Once you have reduced the data to some subset of interest, you can search for
+particular items via the
+.I grep
+or
+.I perl
+commands, or other search commands,
+or you can simply edit the results with your favorite text editor.
+Search commands are useful in reducing the sheer volume of data to
+a more manageable size (by extracting, say, all mountain summits or all
+streams), but you will probably ultimately end up looking through the remaining
+data manually.  The individual records contain codes, such as "ppl" for
+populated places, and "summit" for mountain tops, that can help you pick
+and choose.
+.PP
+There is considerable
+redundancy in place names, and human intelligence is useful in sorting
+things out.  While I was writing
+.I drawmap
+and
+.I llsearch,
+I frequently gazed out my office window, where I could spot at least
+two, and possibly three Baldy Mountains.  There are also quite a
+few Beaver Creeks, Bear Canyons, Saddle Buttes, and Springfields out there.
+By taking a close look at the information associated with each place name,
+you can find the particular locations that interest you.
+.SH SEE ALSO
+.I drawmap(1)
+\" =========================================================================
+\" llsearch.1 - The manual page for the llsearch program.
+\" Copyright (c) 1997,1998,1999,2000,2001  Fred M. Erickson
+\"
+\" This program is free software; you can redistribute it and/or modify
+\" it under the terms of the GNU General Public License as published by
+\" the Free Software Foundation; either version 2, or (at your option)
+\" any later version.
+\"
+\" This program is distributed in the hope that it will be useful,
+\" but WITHOUT ANY WARRANTY; without even the implied warranty of
+\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+\" GNU General Public License for more details.
+\"
+\" You should have received a copy of the GNU General Public License
+\" along with this program; if not, write to the Free Software
+\" Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+\" =========================================================================

Added: packages/drawmap/branches/upstream/current/llsearch.c
===================================================================
--- packages/drawmap/branches/upstream/current/llsearch.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/llsearch.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,192 @@
+/*
+ * =========================================================================
+ * llsearch - A program that extracts a subset of the data in a GNIS file.
+ * Copyright (c) 1997,1998,1999,2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ *
+ * This is a filter to search a GNIS file for any entries within
+ * the specified latitude/longitude box.
+ *
+ * Each US state has its own GNIS file, but some of the larger
+ * states span many degrees of latitude and longitude.
+ * It is convenient to be able to pull out just the data you need
+ * for a given map.
+ *
+ * The program reads from stdin and writes to stdout.
+ */
+#include <sys/types.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include "drawmap.h"
+
+
+void
+license(void)
+{
+	fprintf(stderr, "This program is free software; you can redistribute it and/or modify\n");
+	fprintf(stderr, "it under the terms of the GNU General Public License as published by\n");
+	fprintf(stderr, "the Free Software Foundation; either version 2, or (at your option)\n");
+	fprintf(stderr, "any later version.\n\n");
+
+	fprintf(stderr, "This program is distributed in the hope that it will be useful,\n");
+	fprintf(stderr, "but WITHOUT ANY WARRANTY; without even the implied warranty of\n");
+	fprintf(stderr, "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n");
+	fprintf(stderr, "GNU General Public License for more details.\n\n");
+
+	fprintf(stderr, "You should have received a copy of the GNU General Public License\n");
+	fprintf(stderr, "along with this program; if not, write to the Free Software\n");
+	fprintf(stderr, "Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n");
+}
+
+
+int
+main(int argc, char *argv[])
+{
+	unsigned char buf[2001];
+	unsigned char *tok_ptr;
+	double latitude_low;
+	double latitude_high;
+	double longitude_low;
+	double longitude_high;
+	double latitude;
+	double longitude;
+	ssize_t ret_val;
+	int i;
+
+	if (argc != 5)  {
+		if ((argc == 2) && (argv[1][0] == '-') && (argv[1][1] == 'L'))  {
+			license();
+			exit(0);
+		}
+
+		fprintf(stderr, "Usage:  %s latitude_low longitude_low latitude_high longitude_high\n", argv[0]);
+		fprintf(stderr, "        (The latitude/longitude values are in decimal degrees.)\n");
+		fprintf(stderr, "        (West longitude is negative and south latitude is negative.)\n");
+		fprintf(stderr, "        (%s reads from stdin and writes to stdout.)\n", argv[0]);
+		exit(0);
+	}
+	latitude_low = strtod(argv[1], (char **)0);	/* We assume that these are in decimal degrees */
+	longitude_low = strtod(argv[2], (char **)0);	/* We assume that these are in decimal degrees */
+	latitude_high = strtod(argv[3], (char **)0);	/* We assume that these are in decimal degrees */
+	longitude_high = strtod(argv[4], (char **)0);	/* We assume that these are in decimal degrees */
+
+	if (latitude_low > latitude_high)  {
+		latitude = latitude_high;
+		latitude_high = latitude_low;
+		latitude_low = latitude;
+	}
+	if (longitude_low > longitude_high)  {
+		longitude = longitude_high;
+		longitude_high = longitude_low;
+		longitude_low = longitude;
+	}
+
+	if ((latitude_high < -90.0) || (latitude_high > 90.0) ||
+	    (latitude_low < -90.0) || (latitude_low > 90.0) ||
+	    (longitude_high < -180.0) || (longitude_high > 180.0) ||
+	    (longitude_low < -180.0) || (longitude_low > 180.0))  {
+		fprintf(stderr, "Error:  Parameters appear incorrect\n");
+		exit(0);
+	}
+	if ((latitude_high < latitude_low) || (longitude_high < longitude_low))  {
+		fprintf(stderr, "Error:  Parameters appear incorrect\n");
+		exit(0);
+	}
+
+	while ((ret_val = get_a_line(0, buf, 2000)) > 0)  {
+		buf[ret_val] = '\0';	/* NUL-terminate the line for the string routines below. */
+
+		/*
+		 * We need to figure out whether it is an old-style or new-style record.
+		 */
+		if ((tok_ptr = strstr(buf, "\",\"")) != (unsigned char *)0)  {
+			/* New-style record. */
+			if ((tok_ptr + 3) < (buf + ret_val))  {
+				tok_ptr += 3;
+			}
+			else  {
+				fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+				continue;
+			}
+			for (i = 0; i < 7; i++)  {
+				if (((tok_ptr = strstr(tok_ptr, "\",\"")) != (unsigned char *)0) && (*tok_ptr != '\0'))  {
+					if ((tok_ptr + 3) < (buf + ret_val))  {
+						tok_ptr += 3;
+					}
+					else  {
+						break;
+					}
+				}
+				else  {
+					break;
+				}
+			}
+			if (i != 7)  {
+				/*
+				 * If i != 7, then we ran out of data before finding
+				 * the latitude.  Skip the record.
+				 */
+				fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+				continue;
+			}
+			latitude = atof(tok_ptr);
+			if (((tok_ptr = strstr(tok_ptr, "\",\"")) != (unsigned char *)0) && (*tok_ptr != '\0') && (*(tok_ptr + 3) != '\0'))  {
+				tok_ptr += 3;
+				longitude = atof(tok_ptr);
+			}
+			else  {
+				fprintf(stderr, "Defective GNIS record:  <%s>\n", buf);
+				continue;
+			}
+		}
+		else  {
+			/* Old-style record. */
+			if (ret_val < 96)  {
+				/* The record is too short to process.  Ignore it. */
+				fprintf(stderr, "Defective GNIS record (too short):  <%s>\n", buf);
+				continue;
+			}
+
+			/*
+			 * Note:  We assume latitude_low, longitude_low, latitude_high, and longitude_high
+			 * were entered in decimal degrees.
+			 * The latitude and longitude from the old-style GNIS files, however, are in
+			 * DMS form (DDMMSS for latitude, DDDMMSS for longitude), and
+			 * require special conversion functions.
+			 */
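+			/*
+			 * For example (illustrative values only), a latitude field of
+			 * "461230" followed by 'N' converts to 46 + 12/60 + 30/3600,
+			 * or roughly 46.2083 degrees north.
+			 */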
+			if ((buf[86] != 'N') && (buf[86] != 'S'))  {
+				/* Defective record */
+				fprintf(stderr, "Defective GNIS record (latitude defective):  <%s>\n", buf);
+				continue;
+			}
+			if ((buf[95] != 'E') && (buf[95] != 'W'))  {
+				/* Defective record */
+				fprintf(stderr, "Defective GNIS record (longitude defective):  <%s>\n", buf);
+				continue;
+			}
+			latitude = lat_conv(&buf[80]);
+			longitude = lon_conv(&buf[88]);
+		}
+
+		if ((latitude >= latitude_low) &&
+		    (latitude <= latitude_high) &&
+		    (longitude >= longitude_low) &&
+		    (longitude <= longitude_high))  {
+			write(1, buf, ret_val);
+		}
+	}
+}

Added: packages/drawmap/branches/upstream/current/raster.h
===================================================================
--- packages/drawmap/branches/upstream/current/raster.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/raster.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,35 @@
+/*
+ * =========================================================================
+ * raster.h - A header file to define relevant portions of the SUN rasterfile format.
+ * Copyright (c) 1997  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+#define	MAGIC   0x59a66a95
+#define STANDARD	1
+#define EQUAL_RGB	1
+
+struct rasterfile {
+    long magic;
+    long width;
+    long height;
+    long depth;
+    long length;
+    long type;
+    long maptype;
+    long maplength;
+};
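+
+/*
+ * Illustrative sketch (not part of the original header):  a writer of this
+ * format would typically fill in the header for an 8-bit-deep image of
+ * width w, height h, and an n-entry RGB colormap roughly as follows:
+ *
+ *	struct rasterfile hdr;
+ *	hdr.magic     = MAGIC;
+ *	hdr.width     = w;
+ *	hdr.height    = h;
+ *	hdr.depth     = 8;
+ *	hdr.length    = w * h;		(image bytes; assumes an even width)
+ *	hdr.type      = STANDARD;
+ *	hdr.maptype   = EQUAL_RGB;
+ *	hdr.maplength = 3 * n;		(n red, n green, and n blue bytes)
+ */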

Added: packages/drawmap/branches/upstream/current/sdts2dem.1n
===================================================================
--- packages/drawmap/branches/upstream/current/sdts2dem.1n	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/sdts2dem.1n	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,119 @@
+.TH SDTS2DEM 1 "Jul 24, 2001" \" -*- nroff -*-
+.SH NAME
+sdts2dem \- Convert a 24K USGS SDTS DEM to a DEM in the `classic' format.
+.SH SYNOPSIS
+.B sdts2dem
+[-L] | [sdts_dem_file.ddf [output_file_name]]
+
+.SH DESCRIPTION
+The U.S. Geological Survey (USGS) provides sites on the Internet with
+a lot of Digital Elevation Model (DEM) data.
+Depending on the resolution of the data, it may be available in one
+of at least three different formats:  `classic' DEM format,
+a newer version of the `classic' format, or Spatial Data Transfer
+System (SDTS) format.
+The 24K DEM data (which are also called 7.5-minute DEM data)
+are only available for free download in the SDTS format.
+.PP
+The
+.I drawmap
+program can read the files in SDTS format; but the SDTS data
+come in the form of archives, each of which contains numerous files.
+It may sometimes be more efficient, and perhaps simpler,
+to store the data in the `classic' format.
+.I Sdts2dem
+is a program that converts each SDTS archive into a single classic-format
+DEM file.
+So far,
+.I sdts2dem
+only works with 24K DEM data, mainly because I haven't got any
+other data available to test against.
+.PP
+If you invoke the program with the "-L" argument, it will print some
+license information and exit.
+In normal use, the first argument is an SDTS file name.
+.PP
+Each SDTS DEM archive should contain one or more files with names
+of the form ????CEL@.DDF,
+where the '?' symbol stands for any single character, and the '@'
+symbol stands for any single digit.  If you provide a single
+such file as an argument,
+.I sdts2dem
+will produce a classic-format DEM file, based on the given SDTS file
+and the other files in the SDTS archive.
+(When you unpack the SDTS archives, you can change all of the resulting
+file names to all lower case and/or compress all of the files
+with the
+.I gzip
+program.  If you are going to change to lower case, change all of the files.
+If you are going to compress the files, compress all of them.)
+.PP
+The USGS takes each 1-degree-square block of latitude and longitude, and divides it
+into an eight-by-eight grid of 7.5-minute-square `quads'.
+The rows of this grid are labeled 'a' to 'h' from
+bottom to top, and the columns are labeled `1' through `8' from right
+to left.  Each quad is then referred to by a name of the form `AABBBCD',
+where `AA' is the latitude of the southeast corner of the 1-degree block,
+`BBB' is the longitude of the southeast corner,
+and `C' and `D' represent the corresponding row and column labels.
+If you don't specify an output file name, the output file produced by
+.I sdts2dem
+will have the form `AABBBCD.dem'.
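+For example, a quad whose 1-degree block has its southeast corner at
+45 degrees north latitude and 112 degrees west longitude, and which lies
+in the third row from the bottom and the third column from the right,
+would produce an output file named `45112c3.dem'.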
+If you specify an output file name, then your name will be used instead.
+.SH LIMITATIONS
+The converted files are in the newer version of the `classic' format.
+This newer format is theoretically backwards compatible with the
+older format, but has a bunch of new fields added to the file header
+(in space that the older format specified as blank).
+Most of these new fields will also be blank in the converted files, because
+their values are embedded in long human-readable text strings
+in the SDTS files, and I didn't consider it worth the effort to write
+a bunch of finicky code to dig them out.  Three of the new fields are
+included, though:  the horizontal datum, the vertical datum, and the
+vertical datum shift.
+These fields are useful in converting back and forth between coordinate
+systems, and in converting elevations to newer measurement scales.
+.PP
+.I Sdts2dem
+will try to populate all of the fields specified in the original `classic'
+format.  However, you may note some differences in the first 140 bytes of
+the header.  One such difference is
+that the latitude and longitude of the southeast corner usually
+appeared in bytes 131 through 139 of the original `classic' format, in
+a truncated form.  (The bytes in the DEM specification are numbered
+starting from 1.)
+In the new `classic' format, these two values are in bytes 110 through 135,
+in all of their un-truncated glory.
+As another example,
+.I sdts2dem
+doesn't even try to recover the free-format text field because it usually
+duplicates information present elsewhere, and because it isn't clear that
+we would end up with anything useful after automated conversion to
+SDTS and automated conversion back to `classic' DEM.
+.PP
+Some floating-point numbers will have a different format from their original
+USGS versions.  The USGS files normally put the first significant
+digit after the decimal point, while
+.I sdts2dem
+puts it in front of the decimal point.  The actual numeric values are the same,
+but the format differs.
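+(For example, a value that the USGS might write with a mantissa of
+0.1105... and an exponent of D+04 will be written by
+.I sdts2dem
+with a mantissa of 1.105... and an exponent of D+03.)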
+.SH SEE ALSO
+.I drawmap(1), sdts2dlg(1)
+\" =========================================================================
+\" sdts2dem.1 - The manual page for the sdts2dem program.
+\" Copyright (c) 2000,2001  Fred M. Erickson
+\"
+\" This program is free software; you can redistribute it and/or modify
+\" it under the terms of the GNU General Public License as published by
+\" the Free Software Foundation; either version 2, or (at your option)
+\" any later version.
+\"
+\" This program is distributed in the hope that it will be useful,
+\" but WITHOUT ANY WARRANTY; without even the implied warranty of
+\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+\" GNU General Public License for more details.
+\"
+\" You should have received a copy of the GNU General Public License
+\" along with this program; if not, write to the Free Software
+\" Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+\" =========================================================================

Added: packages/drawmap/branches/upstream/current/sdts2dem.c
===================================================================
--- packages/drawmap/branches/upstream/current/sdts2dem.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/sdts2dem.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,632 @@
+/*
+ * =========================================================================
+ * sdts2dem.c - A program to convert USGS SDTS DEM files to ordinary USGS DEM files.
+ * Copyright (c) 2000,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <math.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <errno.h>
+#include <time.h>
+#include <string.h>
+#include "drawmap.h"
+#include "dem.h"
+#include "sdts_utils.h"
+
+
+
+/*
+ * Note to the reader of this code.  This code will probably be difficult
+ * to understand unless you are very familiar with the internals of SDTS files
+ * and `classic' DEM files.  Normally I would provide a lot of descriptive
+ * comments to help you along.  However, in this case, such comments would
+ * probably end up being several times the length of the code.  I wrote this
+ * program with two large documents available for reference.  If you want to
+ * follow the operation of the code, you will probably need those documents
+ * too.  The documents were:
+ *
+ * The Spatial Data Transfer Standard Mapping of the USGS Digital Elevation Model,
+ * 11/13/97 version 1, by Mid-Continent Mapping Center Branch of Research, Technology
+ * and Applications.
+ *
+ * Standards for Digital Elevation Models, US Department of the Interior,
+ * US Geological Survey, National Mapping Division, 8/97.
+ *
+ * There are comments at key points in the code, but they are not adequate
+ * for a full understanding unless you have the reference materials at hand.
+ *
+ * Even the documents aren't really enough.  It is also useful to have
+ * both sample SDTS files and sample `classic' DEM files for reference as well.
+ */
+
+
+
+
+
+void gen_header(char *, struct dem_record_type_a *, struct datum *);
+
+
+void
+license(void)
+{
+	fprintf(stderr, "This program is free software; you can redistribute it and/or modify\n");
+	fprintf(stderr, "it under the terms of the GNU General Public License as published by\n");
+	fprintf(stderr, "the Free Software Foundation; either version 2, or (at your option)\n");
+	fprintf(stderr, "any later version.\n\n");
+
+	fprintf(stderr, "This program is distributed in the hope that it will be useful,\n");
+	fprintf(stderr, "but WITHOUT ANY WARRANTY; without even the implied warranty of\n");
+	fprintf(stderr, "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n");
+	fprintf(stderr, "GNU General Public License for more details.\n\n");
+
+	fprintf(stderr, "You should have received a copy of the GNU General Public License\n");
+	fprintf(stderr, "along with this program; if not, write to the Free Software\n");
+	fprintf(stderr, "Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n");
+}
+
+
+
+int
+main(int argc, char *argv[])
+{
+	long i, j, k, l, m, n;
+	union {
+		unsigned long i;
+		float f;
+	} conv;
+	int dem_fdesc;
+	int output_fdesc;
+	ssize_t ret_val;
+	long length;
+	unsigned char buf[DEM_RECORD_LENGTH + 1];
+	char output_file[12];
+	long gz_flag;
+	char save_byte;
+	struct dem_corners dem_corners;
+	ssize_t (*read_function)();
+	long byte_order;
+	struct dem_record_type_a dem_a;
+	struct dem_record_type_c dem_c;
+	struct datum datum;
+	double latitude, longitude;
+	char code1, code2;
+	struct subfield subfield;
+	long get_ret;
+	short *ptr, *sptr;
+	long num_elevs;
+	double x, y;
+	long min_elev_prof, max_elev_prof;
+
+
+	if ((argc == 2) && (argv[1][0] == '-') && (argv[1][1] == 'L'))  {
+		license();
+		exit(0);
+	}
+	if ((argc != 2) && (argc != 3))  {
+		fprintf(stderr, "Usage:  %s ????CEL@.DDF [output_file_name]\n", argv[0]);
+		fprintf(stderr, "        Where the ???? are alphanumeric characters, and @ represents a digit.\n");
+		exit(0);
+	}
+
+
+	/*
+	 * Find the byte order of this machine.
+	 */
+	byte_order = swab_type();
+
+
+	/* Find file name length. */
+	length = strlen(argv[1]);
+	if (length < 12)  {
+		fprintf(stderr, "File name %s appears too short to be valid.  Should look like ????CEL@.DDF\n", argv[1]);
+		exit(0);
+	}
+
+	/*
+	 * Figure out if the file is gzip-compressed or not.
+	 */
+	if ((strcmp(&argv[1][length - 3], ".gz") == 0) ||
+	    (strcmp(&argv[1][length - 3], ".GZ") == 0))  {
+		gz_flag = 1;
+		if ((dem_fdesc = buf_open_z(argv[1], O_RDONLY)) < 0)  {
+			fprintf(stderr, "Can't open %s for reading, errno = %d\n", argv[1], errno);
+			exit(0);
+		}
+		read_function = buf_read_z;
+	}
+	else  {
+		gz_flag = 0;
+		if ((dem_fdesc = buf_open(argv[1], O_RDONLY)) < 0)  {
+			fprintf(stderr, "Can't open %s for reading, errno = %d\n", argv[1], errno);
+			exit(0);
+		}
+		read_function = buf_read;
+	}
+
+
+	/*
+	 * Files in Spatial Data Transfer System (SDTS) format are markedly
+	 * different from the old DEM files.  (As a side note, there does not
+	 * appear to be a specific name for the DEM format.  Most documents
+	 * just call it DEM format, and use "SDTS DEM", or some equivalent
+	 * when they refer to SDTS formatted files.  I usually just call it
+	 * the ordinary DEM format.)
+	 *
+	 * We insist that the user specify one, single, SDTS file
+	 * on the command line.
+	 * The file must be the one whose name has the form ????CEL?.DDF
+	 * (or ????cel?.ddf), and it may have a .gz on the end if it is gzip
+	 * compressed.
+	 *
+	 * We allow the files to be gzip-compressed, and they can have either
+	 * ".gz" or ".GZ" on the end.  However, we insist that the rest of
+	 * the file name have consistent case.  That is, if the 'F' or 'f'
+	 * in the ".DDF" or ".ddf" is in a given case, the rest of the file
+	 * had better be in that same case.
+	 *
+	 * If the following "if" test succeeds, we assume we have an SDTS file.
+	 */
+	if (((length >= 7) && (gz_flag != 0) &&
+	     ((strncmp(&argv[1][length - 7], ".ddf", 4) == 0) ||
+	      (strncmp(&argv[1][length - 7], ".DDF", 4) == 0))) ||
+	    ((length >= 4) && (gz_flag == 0) &&
+	     ((strcmp(&argv[1][length - 4], ".ddf") == 0) ||
+	      (strcmp(&argv[1][length - 4], ".DDF") == 0))))  {
+		/* SDTS file */
+
+		/* Close the file.  We will reopen it in parse_dem_sdts(). */
+		if (gz_flag == 0)  {
+			buf_close(dem_fdesc);
+		}
+		else  {
+			buf_close_z(dem_fdesc);
+		}
+
+		/*
+		 * Check that the file name takes the form that we expect.
+		 */
+		if (((gz_flag != 0) &&
+		     ((strncmp(&argv[1][length - 11], "ce", 2) != 0) &&
+		      (strncmp(&argv[1][length - 11], "CE", 2) != 0))) ||
+		    ((gz_flag == 0) &&
+		     (strncmp(&argv[1][length - 8], "ce", 2) != 0) &&
+		     (strncmp(&argv[1][length - 8], "CE", 2) != 0)))  {
+			fprintf(stderr, "The file %s looks like an SDTS file, but the name doesn't look right.\n", argv[1]);
+			exit(0);
+		}
+
+		/*
+		 * The file name looks okay.  Let's launch into the information parsing.
+		 */
+		if (parse_dem_sdts(argv[1], &dem_a, &dem_c, &datum, gz_flag) != 0)  {
+			exit(0);
+		}
+	}
+
+
+	/*
+	 * Print all of the parsed header data.
+	 */
+//	print_dem_a_c(&dem_a, &dem_c);
+
+
+	/* Create the output file. */
+	if (argc == 3)  {
+		if ((output_fdesc = open(argv[2], O_WRONLY | O_CREAT | O_EXCL, 0644)) < 0)  {
+			fprintf(stderr, "Can't create %s for writing, errno = %d\n", argv[2], errno);
+			exit(0);
+		}
+	}
+	else  {
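+		/*
+		 * No output name was given, so build the `AABBBCD.dem' name described
+		 * in the manual page from the southeast corner of the DEM:  the row
+		 * letter ('a' through 'h' from the bottom) and the column digit ('1'
+		 * through '8' from the right) of the 7.5-minute quad within its
+		 * 1-degree block.  The 0.02-degree nudge evidently guards against
+		 * small rounding errors in the corner coordinates, so that the floor()
+		 * operations land in the intended block and quad.
+		 */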
+		code1 = 'a' + floor((fabs(dem_a.se_lat) + (dem_a.se_lat < 0 ? -1.0 : 1.0) * 0.02 - floor(fabs(dem_a.se_lat) + (dem_a.se_lat < 0 ? -1.0 : 1.0) * 0.02)) * 8.0);
+		code2 = '1' + floor((fabs(dem_a.se_long) + (dem_a.se_long < 0 ? 1.0 : -1.0) * 0.02 - floor(fabs(dem_a.se_long) + (dem_a.se_long < 0 ? 1.0 : -1.0) * 0.02)) * 8.0);
+		sprintf(output_file, "%02.2d%03.3d%c%c.dem",
+			(int)(fabs(dem_a.se_lat) + (dem_a.se_lat < 0 ? -1.0 : 1.0) * 0.02),
+			(int)(fabs(dem_a.se_long) + (dem_a.se_long < 0 ? 1.0 : -1.0) * 0.02),
+			code1, code2);
+		if ((output_fdesc = open(output_file, O_WRONLY | O_CREAT | O_EXCL, 0644)) < 0)  {
+			fprintf(stderr, "Can't create %s for writing, errno = %d\n", output_file, errno);
+			exit(0);
+		}
+	}
+
+
+	/*
+	 * Okay.  Fill in the header and write it out to the new DEM file.
+	 */
+	gen_header(buf, &dem_a, &datum);
+	if (write(output_fdesc, buf, DEM_RECORD_LENGTH) != DEM_RECORD_LENGTH)  {
+		fprintf(stderr, "Failed to write header to DEM file.\n");
+		exit(0);
+	}
+
+
+
+	/*
+	 * Now.  All that remains is to read in the actual elevation data, convert
+	 * it from west-to-east uniform-length profiles into south-to-north variable-length profiles
+	 * and write it to the output file.
+	 */
+
+
+	/*
+	 * Allocate the memory array to store the incoming data.
+	 * Since we have to convert from west-to-east profiles into south-to-north
+	 * profiles, we need to read in all of the data before we can output any.
+	 */
+	ptr = (short *)malloc(sizeof(short) * dem_a.cols * dem_a.rows);
+	if (ptr == (short *)0)  {
+		fprintf(stderr, "malloc of ptr failed\n");
+		exit(0);
+	}
+
+	/*
+	 * Open the file in preparation for parsing.
+	 */
+	if (begin_ddf(argv[1]) < 0)  {
+		fprintf(stderr, "Can't open %s for reading, errno = %d\n", argv[1], errno);
+		exit(0);
+	}
+	/*
+	 * Loop through the subfields until we find all of the elevation data.
+	 */
+	for (j = 0; j < dem_a.rows; j++)  {
+		while ((get_ret = get_subfield(&subfield)) != 0)  {
+			/*
+			 * Skip unwanted subfields at the beginning of the record.
+			 */
+			if (strcmp(subfield.tag, "CVLS") == 0)  {
+				break;
+			}
+		}
+		if (get_ret == 0)  {
+			/* At end of file and we still haven't found what we need. */
+			fprintf(stderr, "Ran out of data in file %s.\n", argv[1]);
+			end_ddf();
+			exit(0);
+		}
+		for (i = 0; i < dem_a.cols; i++)  {
+			if ((strstr(subfield.format, "B") != (char *)0) && (strcmp(subfield.label, "ELEVATION") == 0))  {
+				sptr = (ptr + j * dem_a.cols + i);
+				/*
+				 * These values, rather than being stored in the expected 'I' format (integer numbers),
+				 * are stored in two's-complement binary.  Thus, they must be properly swabbed
+				 * during conversion to internal form.
+				 */
+				if (subfield.length == 2)  {
+					if (byte_order == 0)  {
+						*sptr = (((long)subfield.value[1] << 8) & 0x0000ff00) + ((long)subfield.value[0] & 0x000000ff);
+					}
+					else  {
+						*sptr = (((long)subfield.value[0] << 8) & 0x0000ff00) + ((long)subfield.value[1] & 0x000000ff);
+					}
+				}
+				else if (subfield.length == 4)  {
+					/*
+					 * Note:  When the length is 4, we assume that this is a
+					 * BFP32 value, which means that it is a raw binary IEEE 754
+					 * floating point number.  Thus, this conversion won't work
+					 * on machines where IEEE 754 is not the native floating point
+					 * format.  We could convert from binary into the native floating
+					 * point format the hard way, but it appears that most machines
+					 * support IEEE 754, so we will try it this way for a while.
+					 */
+					conv.i = (((long)subfield.value[3] & 0xff) << 24) |
+						  (((long)subfield.value[2] & 0xff) << 16) |
+						  (((long)subfield.value[1] & 0xff) <<  8) |
+						   ((long)subfield.value[0] & 0xff);
+					if (byte_order == 0)  {
+						/* Do nothing. */
+					}
+					else if (byte_order == 1)  {
+						LE_SWAB(&conv.i);
+					}
+					else if (byte_order == 2)  {
+						PDP_SWAB(&conv.i);
+					}
+					*sptr = round(conv.f);
+				}
+				else  {
+					/* Error */
+					*sptr = dem_a.void_fill;
+				}
+
+				if (i == (dem_a.cols - 1))  {
+					break;
+				}
+			}
+
+			if (get_subfield(&subfield) == 0)  {
+				fprintf(stderr, "Shortage of data in %s.\n", argv[1]);
+				end_ddf();
+				exit(0);
+			}
+			if (strcmp(subfield.tag, "CVLS") != 0)  {
+				/* There weren't the expected number of elevations in the row. */
+				fprintf(stderr, "Shortage of data in %s.\n", argv[1]);
+				end_ddf();
+				exit(0);
+			}
+		}
+	}
+	/* We are done with this file, so close it. */
+	end_ddf();
+
+
+	/*
+	 * Okay.  The array is full of data.  Now we need to
+	 * write it out in DEM record format, and in south-to-north,
+	 * west-to-east order.
+	 */
+	x = dem_a.x_gp_first - dem_a.x_res;
+	for (j = 0; j < dem_a.cols; j++)  {
+		x = x + dem_a.x_res;
+
+		/*
+		 * Write the profile header.
+		 * First we need to find the minimum and maximum elevations for the profile,
+		 * and the total number of elevations in the profile.
+		 */
+		min_elev_prof = 100000;
+		max_elev_prof = -100000;
+		num_elevs = 0;
+		for (l = 0; l < dem_a.rows; l++)  {
+			sptr = (ptr + l * dem_a.cols + j);
+
+			/*
+			 * Some files contain 32767 as a marker for non-valid
+			 * data.  We take this opportunity to convert these values
+			 * into dem_a.edge_fill so that the rest of the code
+			 * doesn't have to deal with them.
+			 *
+			 * On second thought, one presumes that these values were
+			 * present in the original DEM files, before they were
+			 * converted to SDTS.  Thus, in the spirit of trying to
+			 * produce a DEM file as much like the original as possible,
+			 * we leave the values alone.
+			 *
+			 * There was also the additional problem that eliminating the
+			 * 32767 values would sometimes totally eliminate a profile from
+			 * the DEM file, since num_elevs would be zero for a line with
+			 * only 32767 values.  This would mean that the file contents
+			 * would not jibe with the number of lines given in the global
+			 * header at the top of the file.  We could write additional code
+			 * to deal with this, but it doesn't seem worth it to eliminate
+			 * data values that were probably present in the original source
+			 * file.
+			 */
+			//if (*sptr == 32767)  {
+			//	*sptr = dem_a.edge_fill;
+			//}
+
+			if (*sptr != dem_a.edge_fill)  {
+				num_elevs++;
+
+				if (*sptr != dem_a.void_fill)  {
+					if (min_elev_prof > *sptr)  {
+						min_elev_prof = *sptr;
+					}
+					if (max_elev_prof < *sptr)  {
+						max_elev_prof = *sptr;
+					}
+				}
+			}
+		}
+		//if (num_elevs == 0)  {
+		//	continue;
+		//}
+		y = dem_a.y_gp_first - (double)(dem_a.rows - 1) * dem_a.y_res;
+		i = dem_a.rows - 1;
+		while ((i >= 0) && (*(ptr + i * dem_a.cols + j) == dem_a.edge_fill))  {
+			y = y + dem_a.y_res;
+			i--;
+		}
+		sprintf(buf, "%6d%6d%6d%6d% 24.15E% 24.15E   0.0                  % 24.15E% 24.15E",
+			1, j + 1, num_elevs, 1, x, y, (double)min_elev_prof, (double)max_elev_prof);
+		for (k = 0; k < 144; k++)  {
+			if (buf[k] == 'E')  buf[k] = 'D';	// USGS files use 'D' for exponentiation.
+		}
+
+		/*
+		 * The header is ready to go.
+		 * Now just pump out data until it is all gone.
+		 */
+		k = 144;
+		for ( ; i >= 0; i--)  {
+			sptr = ptr + i * dem_a.cols + j;
+			if (*sptr == dem_a.edge_fill)  {
+				break;
+			}
+			sprintf(&buf[k], "%6d", *sptr);
+			k = k + 6;
+			if ((k > (DEM_RECORD_LENGTH - 6)) || (i == 0))  {
+				/* The record is full.  Write it out. */
+				for ( ; k < DEM_RECORD_LENGTH; k++)  {
+					buf[k] = ' ';
+				}
+				if (write(output_fdesc, buf, DEM_RECORD_LENGTH) != DEM_RECORD_LENGTH)  {
+					fprintf(stderr, "Failed to write record to DEM file.\n");
+					exit(0);
+				}
+				k = 0;
+			}
+		}
+		if (k != 0)  {
+			/* Write out a partial record, if necessary. */
+			for ( ; k < DEM_RECORD_LENGTH; k++)  {
+				buf[k] = ' ';
+			}
+			if (write(output_fdesc, buf, DEM_RECORD_LENGTH) != DEM_RECORD_LENGTH)  {
+				fprintf(stderr, "Failed to write record to DEM file.\n");
+				exit(0);
+			}
+		}
+	}
+
+
+	/*
+	 * If we have data for a type C record, then output the record.
+	 */
+	if (dem_a.accuracy != 0)  {
+		sprintf(buf, "%6d%6d%6d%6d%6d%6d%6d%6d%6d%6d",
+			dem_c.datum_stats_flag, dem_c.datum_rmse_x, dem_c.datum_rmse_y,
+			dem_c.datum_rmse_z, dem_c.datum_sample_size,
+			dem_c.dem_stats_flag, dem_c.dem_rmse_x, dem_c.dem_rmse_y,
+			dem_c.dem_rmse_z, dem_c.dem_sample_size);
+		for (k =  60; k < DEM_RECORD_LENGTH; k++)  {
+			buf[k] = ' ';
+		}
+		if (write(output_fdesc, buf, DEM_RECORD_LENGTH) != DEM_RECORD_LENGTH)  {
+			fprintf(stderr, "Failed to write record to DEM file.\n");
+			exit(0);
+		}
+	}
+
+
+	close(output_fdesc);
+}
+
+
+
+
+/*
+ * Using the parameters from the dem_a structure, prepare an output DEM
+ * file header in the classic format (the non-SDTS format).
+ */
+void
+gen_header(char *buf, struct dem_record_type_a *dem_a, struct datum *datum)
+{
+	long i;
+	long d, m;
+	double s;
+
+	/*
+	 * Begin by putting a title at the start of the header.
+	 * There is space at the start of the header for 40 bytes of title, followed
+	 * by 40 bytes of free-form text, followed by 29 bytes of blank fill.
+	 * Just fill the first 40 bytes with whatever is in the dem_a.title array,
+	 * fill the free form area with blanks,
+	 * and add the blank fill.  (The dem_a.title array contains all of the title
+	 * text that we found in the *IDEN.DDF SDTS file.)
+	 */
+	sprintf(buf, "%-40.40s                                                                     ", dem_a->title);
+
+	/*
+	 * Now we need the southeast corner latitude/longitude in SDDDMMSS.SSSS format.
+	 * Followed by the level code, elevation pattern, plane ref, and zone.
+	 *
+	 * Used to print out the seconds with 7.4f, which is what the standard
+	 * says.  However, the last two digits should always be zero anyway,
+	 * and quantization error can produce incorrect-looking results.  By
+	 * setting the format to 5.2f, we round the data to more correct-looking
+	 * values.
+	 */
+	decimal_degrees_to_dms(dem_a->se_long, &d, &m, &s);
+	sprintf(&buf[109], "% 3d%2.2d%05.2f  ", d, m, s);
+	decimal_degrees_to_dms(dem_a->se_lat, &d, &m, &s);
+	sprintf(&buf[122], " % 2d%2.2d%05.2f  ", d, m, s);
+	/*
+	 * Byte 136 is a process code.
+	 * Byte 137 is blank fill. Bytes 138-140 are a sectional indicator for
+	 * 30-minute DEMs.  Until we find a sample 30-minute DEM, we
+	 * will just make this field blank.
+	 */
+	sprintf(&buf[135], "%1.1d    ", dem_a->process_code);
+	sprintf(&buf[140], "%-4.4s%6d%6d%6d%6d", dem_a->origin_code, dem_a->level_code,
+		dem_a->elevation_pattern, dem_a->plane_ref, dem_a->zone);
+	
+	/*
+	 * There are 360 bytes of projection parameters, which are unused.
+	 * They are ordinarily set to 0.
+	 */
+	strcat(&buf[168], "   0.0                     0.0                     0.0                     0.0                     0.0                  ");
+	strcat(&buf[288], "   0.0                     0.0                     0.0                     0.0                     0.0                  ");
+	strcat(&buf[408], "   0.0                     0.0                     0.0                     0.0                     0.0                  ");
+
+	/*
+	 * Some more misc parameters:  plane units, elevation units, number of sides.
+	 */
+	sprintf(&buf[528], "%6d%6d%6d", dem_a->plane_units, dem_a->elev_units, 4);
+
+	/*
+	 * The next block of data contains the four corners of the DEM,
+	 * in ground planimetric coordinates, followed by the minimum and
+	 * maximum elevation.
+	 * The USGS uses 'D' for exponentiation, rather than 'E', so change the 'E'
+	 * characters to 'D'.  The USGS appears to always have the pre-decimal-point
+	 * digit be a zero or a blank.  That is hard to do, so we won't bother.
+	 */
+	sprintf(&buf[546], "% 24.15E% 24.15E", dem_a->sw_x_gp, dem_a->sw_y_gp);
+	sprintf(&buf[594], "% 24.15E% 24.15E", dem_a->nw_x_gp, dem_a->nw_y_gp);
+	sprintf(&buf[642], "% 24.15E% 24.15E", dem_a->ne_x_gp, dem_a->ne_y_gp);
+	sprintf(&buf[690], "% 24.15E% 24.15E", dem_a->se_x_gp, dem_a->se_y_gp);
+	sprintf(&buf[738], "% 24.15E% 24.15E", (double)dem_a->min_elev, (double)dem_a->max_elev);
+	for (i = 546; i < 786; i++)  {
+		if (buf[i] == 'E')  buf[i] = 'D';	// USGS files use 'D' for exponentiation.
+	}
+
+	/*
+	 * Now comes the angle between the reference system and the DEM.
+	 * This should always be zero.
+	 */
+	sprintf(&buf[786], "   0.0                  ");
+
+	/*
+	 * The accuracy code.  Zero means unknown.
+	 */
+	sprintf(&buf[810], "%6d", dem_a->accuracy);
+
+	/*
+	 * Now comes a three-element array of spatial resolution units:  x, y, and z.
+	 */
+	sprintf(&buf[816], "%12.6E%12.6E%12.6E", dem_a->x_res, dem_a->y_res, dem_a->z_res);
+	for (i = 816; i < 852; i++)  {
+		if (buf[i] == 'E')  buf[i] = 'D';	// USGS files use 'D' for exponentiation.
+	}
+
+	/*
+	 * Now we insert the columns/rows.  The number of columns is set to 1 at this
+	 * point.  Each elevation profile contains the correct number of columns for
+	 * that profile.  However, since we will be rotating the data by 90 degrees,
+	 * we need to put in the columns value for what would otherwise be the row value.
+	 */
+	sprintf(&buf[852], "%6d%6d", 1, dem_a->cols);
+
+	/*
+	 * From this point on, the values are only defined if one is using the
+	 * newer version of the DEM header.  We will leave them all blank, except
+	 * that we will insert the horizontal and vertical datum information, since
+	 * they are quite useful.
+	 *
+	 * We could also pry the other fields out of SDTS, but it is painful
+	 * because they are generally embedded in long text strings.
+	 */
+	sprintf(&buf[864], "                        ");
+	sprintf(&buf[888], "%2d", dem_a->vertical_datum);
+	sprintf(&buf[890], "%2d", dem_a->horizontal_datum);
+	sprintf(&buf[892], "                ");
+	sprintf(&buf[908], "% 7.2f", dem_a->vertical_datum_shift);
+	sprintf(&buf[915], "                                                                                                             ");
+}

Added: packages/drawmap/branches/upstream/current/sdts2dlg.1n
===================================================================
--- packages/drawmap/branches/upstream/current/sdts2dlg.1n	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/sdts2dlg.1n	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,135 @@
+.TH SDTS2DLG 1 "Jul 24, 2001" \" -*- nroff -*-
+.SH NAME
+sdts2dlg \- Convert a USGS SDTS DLG to a DLG in the `optional' format.
+.SH SYNOPSIS
+.B sdts2dlg
+[-L] | [sdts_dlg_file.ddf [output_file_name]]
+
+.SH DESCRIPTION
+The U.S. Geological Survey (USGS) provides sites on the Internet with
+a lot of Digital Line Graph (DLG) data.
+Depending on the resolution of the data, it may be available in one
+of at least two different formats:  the `optional' DLG format,
+or the Spatial Data Transfer System (SDTS) format.
+The 24K DLG data (which are also called 7.5-minute DLG data)
+are only available for free download in the SDTS format.
+.PP
+The
+.I drawmap
+program can read files in SDTS format; but the SDTS information
+comes in the form of archives, each of which contains numerous files.
+It may sometimes be more efficient, and perhaps simpler,
+to store the data in the `optional' format.
+.I Sdts2dlg
+is a program that converts each SDTS archive into a single optional-format
+DLG file.
+Currently
+.I sdts2dlg
+only works with 24K and 100K DLG data.
+.PP
+If you invoke the program with the "-L" argument, it will print some
+license information and exit.
+In normal use, the first argument is an SDTS file name.
+.PP
+Each SDTS DLG archive should contain one or more files with names
+of the form ????LE@@.DDF,
+where the '?' symbol stands for any single character, and the '@'
+symbol stands for any single digit.  If you provide a single
+such file as an argument,
+.I sdts2dlg
+will produce an optional-format DLG file, based on the given SDTS file
+and the other files in the SDTS archive.
+(When you unpack the SDTS archives, you can change all of the resulting
+file names to all lower case and/or compress all of the files
+with the
+.I gzip
+program.  If you are going to change to lower case, change all of the files.
+If you are going to compress the files, compress all of them.)
+.PP
+The USGS takes each 1-degree-square block of latitude and longitude, and divides it
+into an eight-by-eight grid of 7.5-minute-square `quads'.
+The rows of this grid are labeled 'a' to 'h' from
+bottom to top, and the columns are labeled `1' through `8' from right
+to left.  Each quad is then referred to by a name of the form `AABBBCD',
+where `AA' is the latitude of the southeast corner of the 1-degree block,
+`BBB' is the longitude of the southeast corner,
+and `C' and `D' represent the corresponding row and column labels.
+If you don't specify an output file name, the output file produced by
+.I sdts2dlg
+will have the form `AABBBCD.dlg'.
+For 100K DLG files, each of which generally covers 16 quads, this
+naming is also used, with the `C' and `D' code being the same
+as for the quad with the same southeast corner as the 100K DLG data.
+If you specify an output file name, then your name will be used instead.
+.SH LIMITATIONS
+.I Sdts2dlg
+attempts to recreate the original DLG-3 file, as it was
+before conversion to SDTS.
+Node entries include line lists, but no area lists.
+Area entries include line lists, no node lists, and no area
+coordinate lists.
+Line entries include line coordinate lists.
+Attributes are included as major-minor pairs.
+.PP
+Try as it might, though,
+.I sdts2dlg
+is going to produce files that differ from the originals.
+In the Node entries, for example, the
+line lists have no preferred ordering, and the output from
+.I sdts2dlg
+is unlikely to reproduce the ordering of the original DLG file.
+In the Area entries, the line lists do have a specified ordering,
+but there is no rule for choosing which line to start each
+list with.  Thus, the lists are unlikely to be exact duplicates
+of the original file.
+Neither of these differences should be a problem.  The
+lists are still valid, even if they are not identical to the
+originals.
+.PP
+For the universe polygon (which completely surrounds the
+data coverage area) the Universal Transverse Mercator (UTM)
+coordinates of its representative point are likely to be different
+from the originals.
+The SDTS files do not appear to encode this particular representative
+point, so
+.I sdts2dlg
+inserts the coordinates of the southwest corner of the map area.
+.PP
+Some floating-point numbers will have a different format from their original
+USGS versions.  The USGS files normally put the first significant
+digit after the decimal point, while
+.I sdts2dlg
+puts it in front of the decimal point.  The actual numeric values are the same,
+but the format differs.
+.PP
+There are two fields
+in the header that don't always show up in existing optional-format
+files, although the specification allows them:  the horizontal datum
+and the vertical datum.  These are too useful to leave out, so
+.I sdts2dlg
+includes them.
+.PP
+Finally, there may be data discrepancies due to errors in the
+program.  Errors are a distinct possibility when it comes to
+attributes.  There are many different kinds of possible attributes, and
+I have not yet found adequate test data for some of them.
+.SH SEE ALSO
+.I drawmap(1), sdts2dem(1)
+\" =========================================================================
+\" sdts2dlg.1 - The manual page for the sdts2dlg program.
+\" Copyright (c) 2000,2001  Fred M. Erickson
+\"
+\" This program is free software; you can redistribute it and/or modify
+\" it under the terms of the GNU General Public License as published by
+\" the Free Software Foundation; either version 2, or (at your option)
+\" any later version.
+\"
+\" This program is distributed in the hope that it will be useful,
+\" but WITHOUT ANY WARRANTY; without even the implied warranty of
+\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+\" GNU General Public License for more details.
+\"
+\" You should have received a copy of the GNU General Public License
+\" along with this program; if not, write to the Free Software
+\" Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+\" =========================================================================

Added: packages/drawmap/branches/upstream/current/sdts2dlg.c
===================================================================
--- packages/drawmap/branches/upstream/current/sdts2dlg.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/sdts2dlg.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,179 @@
+/*
+ * =========================================================================
+ * sdts2dlg.c - A program to convert USGS SDTS DLG files to optional-format USGS DLG files.
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <math.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <errno.h>
+#include <time.h>
+#include <string.h>
+#include "drawmap.h"
+#include "dlg.h"
+#include "sdts_utils.h"
+
+
+
+long get_extra_attrib(long, long *major, long *minor, struct subfield *subfield);
+
+
+void
+license(void)
+{
+	fprintf(stderr, "This program is free software; you can redistribute it and/or modify\n");
+	fprintf(stderr, "it under the terms of the GNU General Public License as published by\n");
+	fprintf(stderr, "the Free Software Foundation; either version 2, or (at your option)\n");
+	fprintf(stderr, "any later version.\n\n");
+
+	fprintf(stderr, "This program is distributed in the hope that it will be useful,\n");
+	fprintf(stderr, "but WITHOUT ANY WARRANTY; without even the implied warranty of\n");
+	fprintf(stderr, "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n");
+	fprintf(stderr, "GNU General Public License for more details.\n\n");
+
+	fprintf(stderr, "You should have received a copy of the GNU General Public License\n");
+	fprintf(stderr, "along with this program; if not, write to the Free Software\n");
+	fprintf(stderr, "Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n");
+}
+
+
+
+/*
+ * This program takes an SDTS file set as input,
+ * and produces a single optional-format DLG file
+ * as output.
+ */
+int
+main(int argc, char *argv[])
+{
+	int dlg_fdesc;
+	long length;
+	long gz_flag;
+	ssize_t (*read_function)();
+
+
+	if ((argc == 2) && (argv[1][0] == '-') && (argv[1][1] == 'L'))  {
+		license();
+		exit(0);
+	}
+	if ((argc != 2) && (argc != 3))  {
+		fprintf(stderr, "Usage:  %s ????LE@@.DDF [output_file_name]\n", argv[0]);
+		fprintf(stderr, "        Where the ???? are alphanumeric characters, and @ represents a digit.\n");
+		exit(0);
+	}
+
+
+	/* Find file name length. */
+	length = strlen(argv[1]);
+	if (length < 12)  {
+		fprintf(stderr, "File name %s appears too short to be valid.  Should look like ????LE@@.DDF\n", argv[1]);
+		exit(0);
+	}
+
+	/*
+	 * Figure out if the file is gzip-compressed or not.
+	 */
+	if ((strcmp(&argv[1][length - 3], ".gz") == 0) ||
+	    (strcmp(&argv[1][length - 3], ".GZ") == 0))  {
+		gz_flag = 1;
+		if ((dlg_fdesc = buf_open_z(argv[1], O_RDONLY)) < 0)  {
+			fprintf(stderr, "Can't open %s for reading, errno = %d\n", argv[1], errno);
+			exit(0);
+		}
+		read_function = buf_read_z;
+	}
+	else  {
+		gz_flag = 0;
+		if ((dlg_fdesc = buf_open(argv[1], O_RDONLY)) < 0)  {
+			fprintf(stderr, "Can't open %s for reading, errno = %d\n", argv[1], errno);
+			exit(0);
+		}
+		read_function = buf_read;
+	}
+
+
+	/*
+	 * Files in Spatial Data Transfer System (SDTS) format are markedly
+	 * different from the optional-format DLG files.
+	 *
+	 * We insist that the user specify one, single, SDTS file
+	 * on the command line.
+	 * The file must be the one whose name has the form ????LE??.DDF
+	 * (or ????le??.ddf), and it may have a .gz on the end if it is gzip
+	 * compressed.
+	 *
+	 * We allow the files to be gzip-compressed, and they can have either
+	 * ".gz" or ".GZ" on the end.  However, we insist that the rest of
+	 * the file name have consistent case.  That is, if the 'F' or 'f'
+	 * in the ".DDF" or ".ddf" is in a given case, the rest of the file
+	 * had better be in that same case.
+	 *
+	 * If the following "if" test succeeds, we assume we have an SDTS file.
+	 */
+	if (((length >= 7) && (gz_flag != 0) &&
+	     ((strncmp(&argv[1][length - 7], ".ddf", 4) == 0) ||
+	      (strncmp(&argv[1][length - 7], ".DDF", 4) == 0))) ||
+	    ((length >= 4) && (gz_flag == 0) &&
+	     ((strcmp(&argv[1][length - 4], ".ddf") == 0) ||
+	      (strcmp(&argv[1][length - 4], ".DDF") == 0))))  {
+		/* SDTS file */
+
+		/* Close the file.  We will reopen it in parse_full_dlg_sdts(). */
+		if (gz_flag == 0)  {
+			buf_close(dlg_fdesc);
+		}
+		else  {
+			buf_close_z(dlg_fdesc);
+		}
+
+		/*
+		 * Check that the file name takes the form that we expect.
+		 */
+		if (((gz_flag != 0) &&
+		     ((strncmp(&argv[1][length - 11], "le", 2) != 0) &&
+		      (strncmp(&argv[1][length - 11], "LE", 2) != 0))) ||
+		    ((gz_flag == 0) &&
+		     (strncmp(&argv[1][length - 8], "le", 2) != 0) &&
+		     (strncmp(&argv[1][length - 8], "LE", 2) != 0)))  {
+			fprintf(stderr, "The file %s looks like an SDTS file, but the name doesn't look right.\n", argv[1]);
+			exit(0);
+		}
+
+
+		/*
+		 * The input file name looks okay.  Let's launch into the information parsing.
+		 *
+		 * process_dlg_sdts() will create and write the output file.
+		 */
+		if (argc == 3)  {
+			if (process_dlg_sdts(argv[1], argv[2], gz_flag, (struct image_corners *)0, 0, 1) != 0)  {
+				exit(0);
+			}
+		}
+		else  {
+			if (process_dlg_sdts(argv[1], (char *)0, gz_flag, (struct image_corners *)0, 0, 1) != 0)  {
+				exit(0);
+			}
+		}
+	}
+}

Added: packages/drawmap/branches/upstream/current/sdts_utils.c
===================================================================
--- packages/drawmap/branches/upstream/current/sdts_utils.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/sdts_utils.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,1717 @@
+/*
+ * =========================================================================
+ * sdts_utils.c - Utility routines for SDTS files.
+ * Copyright (c) 2000,2001  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+#include <fcntl.h>
+#include <math.h>
+#include <stdlib.h>
+#include <unistd.h>
+#include <stdio.h>
+#include <string.h>
+#include <errno.h>
+#include <string.h>
+#include "drawmap.h"
+#include "sdts_utils.h"
+
+
+/*
+ * The routines in this file are used to open, read, and close
+ * files in the Spatial Data Transfer System (SDTS) format.
+ * Specifically, they read files in the ISO 8211 encoding format.
+ * The truly SDTS-specific stuff is handled at a higher layer.
+ *
+ *
+ * There is a lot of code in this file, but only three
+ * functions that are normally used as entry points:
+ *
+ *	int begin_ddf(char *file_name)
+ *
+ *	long get_subfield(struct subfield *subfield)
+ *
+ *	void end_ddf()
+ *
+ * Call begin_ddf(), with the name of an SDTS file as argument,
+ * to begin parsing an ISO 8211 file.  Call end_ddf() when you are
+ * done with that file.  YOU CAN ONLY HAVE ONE FILE OPEN AT ONCE.
+ *
+ * get_subfield() returns the subfields of the file, one at a time.
+ * If you call it, and it returns 1, then you have retrieved a
+ * subfield.  If it returns 0, you have reached end of file.
+ * If there is an error, the function calls exit().
+ *
+ * If you want to try parsing some sample DDF files, there is a
+ * commented-out test program at the end of this file which
+ * will try to read and print out the contents of a DDF file.
+ * It illustrates the use of the three functions.
+ *
+ *
+ * Note that these routines are nowhere near a complete
+ * implementation of the ISO 8211 standard.  Large chunks are
+ * missing, since the purpose of these routines is only to parse
+ * USGS-formatted SDTS files.  Furthermore, the routines may
+ * contain errors due to me misunderstanding the standard.  (I
+ * don't have a copy of the standard, and have gleaned the information
+ * contained here via various tutorials on the Internet.)  Also,
+ * some sections of code remain untested because I don't have any
+ * sample files to test them against.  (The code to read records
+ * longer than 100000 bytes is a case in point.)  Thus,
+ * you might be ill-advised to try to use these routines to parse
+ * randomly-selected ISO 8211 files.  There are library packages
+ * available to do that sort of thing.  fips123 and sdts++ are
+ * two possible starting points, but I don't know how complete
+ * their ISO 8211 support is.  Furthermore, there are at least
+ * two versions of ISO 8211, a 1985 version, and a newer 1994
+ * version.  We attempt to support only the older version here.
+ */
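+
+/*
+ * Minimal usage sketch (illustrative only; error handling trimmed, and the
+ * file name is hypothetical):
+ *
+ *	struct subfield subfield;
+ *
+ *	if (begin_ddf("HY01LE01.DDF") < 0)  {
+ *		exit(0);
+ *	}
+ *	while (get_subfield(&subfield) != 0)  {
+ *		(examine subfield.tag, subfield.label, and subfield.value here)
+ *	}
+ *	end_ddf();
+ */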
+
+
+/*
+ * We begin with a general description of SDTS, in an attempt to
+ * demystify things a bit.  Be warned that my understanding of SDTS
+ * has not reached a high-enough level for me to be considered an
+ * expert on it.  Furthermore, I have never seen a copy of the ISO 8211
+ * standard, which is probably necessary for a full understanding of SDTS.
+ * The information here is gleaned from tutorial sources on the Internet,
+ * and by prying various SDTS files apart to look at the insides.
+ * Thus, any or all of the description that follows may be wrong.  (The
+ * information was, however, sufficient to write a program that can obtain
+ * useful data from SDTS files.)  In particular, be wary of any
+ * terminology or acronyms.  In some cases, I had to guess what the correct
+ * terminology might be, since the sources I looked at varied in the names
+ * they called things.
+ *
+ * First of all, SDTS is intended as a standard for data transfer between
+ * dissimilar machines.  It is designed to package spatial data into a
+ * commonly-readable form so that the data can be passed back and forth
+ * between users with different computer systems.  As such, it is conceptually
+ * similar to parts of many other protocol suites, like the ISO 7-layer transfer
+ * protocols, the TCP/IP suite used on the Internet, morse code, and so on.
+ * In fact, I have seen ISO 8211 described as a possible implementation of
+ * the Presentation Layer (layer 6) of the ISO protocol.  I guess that would
+ * make SDTS an Application Layer (layer 7) protocol.  The 8211 standard is
+ * similar in form and function to other transfer encodings, such as Abstract
+ * Syntax Notation One (ASN.1), which is used in (among other things) the SNMP
+ * network-management suite.
+ *
+ * An SDTS transfer is composed of a number of files, all with the same
+ * basic format.  The files are called modules, and their names all end with the
+ * letters ".DDF" which stand for Data Definition File.  (This may not be
+ * a true statement for the 1994 version of ISO 8211.)  The files are
+ * structured according to the ISO/IEC 8211 standard.  There have been at
+ * least two versions of this standard, 1985 and 1994.  I think that SDTS files
+ * are restricted to the 1985 version, but can't claim this with certainty.
+ *
+ * In general, the file names have the form AAAABBBB.DDF, where AAAA is a name
+ * unique to the specific transfer, like HY01 for a DLG hydrography transfer.
+ * The BBBB part of the name identifies the various modules in the transfer.
+ * For example, there is a HY01CATD.DDF file, which is a catalog of all of the
+ * modules in the transfer.  A 100K DLG hydrography transfer might contain four
+ * separate linear feature data files, named HY01LE01.DDF, HY01LE02.DDF,
+ * HY01LE03.DDF, and HY01LE04.DDF.  (I think that, in SDTS jargon, these
+ * would be called separate layers or separate topological manifolds, but I am
+ * not sure of the terminology.)  Each of these files corresponds to one of the
+ * four files in a hydrography directory under the old optional format.  However,
+ * unlike the old optional-format files, these files are not self-contained DLG
+ * packages.  Some of the information has been split off into other files.
+ * For example, in order to find the polygonal areas,
+ * we need to examine the polygon files, named HY01PC01.DDF, HY01PC02.DDF,
+ * HY01PC03.DDF, and HY01PC04.DDF.  These files contain record IDs that
+ * cross-reference attributes in the attribute file HY01AHYF.DDF.
+ *
+ * This brief description may not make much sense unless you are familiar with
+ * the contents of the old optional-format DLG files, so let's look into them a bit.
+ * (This is a simplified description.  Not horribly simplified, but simplified.
+ * Documents are available from the USGS web sites that describe the DLG format.)
+ * Each optional-format DLG file has a large header, which contains things like
+ * the latitude/longitude of the four corners of the data, and other global
+ * information.  Most, or all, of this has been moved into the SDTS HY01AHDR.DDF
+ * module.  Following the header are a list of Node records, which define all
+ * of the nodal points in the data.  (The points may define locations where line
+ * segments meet, or they may be points were small point-sized features are
+ * located, like a tiny pond, and so on.)  Each node is defined by a pair of
+ * UTM coordinates, and contains cross-references to the lines that
+ * intersect the node.  Following the nodes are the areas.
+ * Each area includes a pair of UTM coordinates that define a representative
+ * point for the area.  There are also a set of cross references to the lines
+ * that bound the area.  Following this may be one or more attribute codes
+ * that describe the type of area (e.g. a lake, or a marsh).  Following the areas
+ * are the linear features.  These contain sets of UTM coordinates that
+ * define a sequence of line segments that form (say) a river or road,
+ * or that form part or all of the boundary of (say) a lake.
+ * The linear features may also have attributes associated with them.
+ *
+ * In SDTS, all of this stuff gets swept into separate files.  The linear features
+ * are defined in one file, which also contains cross-references to attributes in another
+ * file, cross-references to polygons in yet another file, and cross-references to
+ * nodes in yet another file still.  It isn't so hard to understand how everything
+ * fits together, but it can be painful to chase down all of the data you need
+ * in the various files.
+ *
+ * Within each file, all of this information is encoded according to the 8211
+ * standard.  This standard is concerned with how to represent different types of data,
+ * and agglomerate them into larger structures.  Files (modules) are composed of
+ * records, records are composed of fields, and fields are composed of subfields.
+ * (This can be somewhat confusing, since common usage would lean more toward calling
+ * the smallest subdivision a field, and would then construct composite records
+ * from these fields.  C'est la vie.)
+ *
+ * The first record in a module is the Data Definition Record (or Data Descriptive
+ * Record, depending on who you talk to), and is called the DDR for short.  It
+ * contains some general information, and also a definition of the types of data
+ * contained in the various fields.
+ *
+ * This is followed by one or more Data Records (DRs).  These contain actual
+ * fields and subfield data.
+ *
+ * Whether it is a DDR or a DR, each record consists of a Leader, a Directory, and a
+ * Field Area.  The record starts with a leader, which contains some general info about
+ * the record, such as its length.  This is followed by a Directory, which contains
+ * (for each field) a tag, a data length, and an offset into the Field Area at which
+ * additional information appears.  For the DDR, the Field Area contains information
+ * that describes a particular field and the data it will contain.  For the DR,
+ * the Field Area contains the actual data for the field (which may include
+ * multiple chunks of data if the field is composed of several subfields).
+ *
+ * The 8211 standard defines how data is encoded for storage or transmission.
+ * However, at a higher level we need to define what that data is.  That is,
+ * we need to define all of the various tags, data types, data lengths, and
+ * so on, that constitute a DEM transfer, or a DLG transfer, or whatever.
+ * This is where SDTS provides "value-added".  It defines various kinds of
+ * modules that collectively allow the transfer of spatial data.  This is
+ * the modularization that we discussed above, in connection with a hypothetical
+ * hydrography transfer.  However, even this is not yet enough.  We still need
+ * more detail about the various subfields.  For example, in DLG files from
+ * the USGS, in SDTS format, the horizontal datum is stored in the XREF
+ * module, in the XREF field, in the HDAT subfield, as a three-character
+ * ASCII string.  If the string is "NAS", then the datum is NAD-27.  If
+ * the string is "NAX", then the datum is NAD-83.  Etcetera.
+ * As I understand it, this kind of information is not part of the SDTS
+ * standard, per se.  It is defined by the end users of the standard,
+ * in this case the USGS.  Thus, we need to understand SDTS at three
+ * levels:  (1) the low-level data encoding of ISO 8211, defined in the
+ * ISO 8211 standards document, (2) the higher level of abstraction of SDTS
+ * modules, defined in the SDTS standards document, and (3) the particular
+ * data encoding for a particular application, such as a DEM or DLG file.
+ * Each of these things is documented separately and, to a large extent,
+ * independently.
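+ *
+ * As a purely illustrative sketch (the names "datum", NAD_27, and NAD_83 are
+ * made up here, and get_subfield() is the routine defined later in this file),
+ * a caller might interpret the HDAT subfield along these lines:
+ *
+ *	if (strcmp(subfield.label, "HDAT") == 0)  {
+ *		if (strncmp(subfield.value, "NAS", 3) == 0)  datum = NAD_27;
+ *		else if (strncmp(subfield.value, "NAX", 3) == 0)  datum = NAD_83;
+ *	}
+ *
+ * (strncmp() is used because subfield.value is not null-terminated.)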
+ *
+ * Part of item (3), above, is the definition of SDTS "profiles".  These
+ * are specific implementations of SDTS, for specific purposes, that
+ * restrict SDTS to specific subsets of the possible options.  As far as
+ * I know, there are two such profiles:  the Topological Vector Profile (TVP)
+ * for vector information (such as DLG data); and the Raster Profile for information
+ * that naturally falls on a grid (such as DEM data).
+ *
+ * SDTS isn't conceptually difficult, but the details needed for a full
+ * understanding are scattered hither and yon.
+ */
+
+
+
+
+#define FIELD_TERMINATOR	0x1e
+#define UNIT_TERMINATOR		0x1f
+#define REC_LEN_LEN		5
+#define REC_LEADER_LEN		24
+#define MAX_TAGS		10	// Maximum number of field tags *we* allow in a record (not an ISO 8211 limit)
+#define MAX_SUBFIELDS		100	// Maximum number of subfield labels *we* allow in a user record (not an ISO 8211 limit)
+
+
+
+/*
+ * Some global state variables used by many of the subroutines.
+ *
+ * Note that the fact that these are global means that we can only
+ * have one DDF file open at once.
+ *
+ * If it becomes necessary to have more than one file open at once,
+ * we could put these variables into an array of structures, indexed
+ * by the file descriptor.
+ */
+static long leaderless_flag;	// When non-zero, we have encountered a record leader with a Leader ID of 'R'
+static char *ddr_buf = (char *)0;	// DDR record buffer.
+static char *dr_buf = (char *)0;	// DR record buffer.
+static long gz_flag;	// If non-zero, we are reading a gzip-compressed file.
+static ssize_t (*read_function)(int, void *, size_t);
+static int fdesc;	// File descriptor of the open DDF file.
+static long dr_tag;	// Next-available field in the DR.
+static long dr_label;	// Next-available subfield in the field.
+
+
+/*
+ * SDTS files are organized into records, fields, and subfields.
+ * The basic structure of each record, whether it is the DDR, or one of the
+ * DRs, is the same.  (The low-level details differ.)
+ *
+ * We begin by describing the DDR.
+ *
+ * The record begins with a 24-byte Leader, containing general information
+ * about the record.
+ */
+struct record_leader  {
+	long length;		// Record Length.  Integer stored as five ASCII bytes.
+	long ichg_level;	// Interchange Level.  Integer stored as one ASCII byte.
+				// '1', '2', or '3' for DDR. ' ' for DR.
+				//
+	char leader_id;		// Leader Identifier.  A single ASCII byte.  Must be 'L' for DDR, 'D' or 'R' for DR.
+	char ice_ind;		// Inline Code Extension.  A single ASCII byte.
+	char reserved_space;	// Reserved Space.  A single ASCII byte.
+	char application;	// Application Indicator.  A single ASCII byte.
+	long field_cntrl_len;	// Field Control Indicator.  Integer stored as two ASCII bytes.
+				// (In the DDR, must be 0 if Interchange Level is 1, must be 6 for
+				// Interchange Level 2 or 3.  In DR, it appears to always be "  ".)
+				//
+	long fa_addr;		// Base address of Field Area.  Integer stored as 5 ASCII bytes.
+				// (Addresses start at zero.)
+				//
+	char ccs[3];		// Code Character Set Indicator.  Three ASCII bytes.
+	long field_len_len;	// Size of Field Length.  Integer stored as one ASCII byte.  (1 <= length <=9)
+	long field_pos_len;	// Size of Field Position.  Integer stored as one ASCII byte.  (1 <= length <=9)
+	long reserved_digit;	// Reserved Digit.  Integer stored as one ASCII byte.
+				// (As near as I can tell, this is simply reserved for future use.)
+				//
+	long field_tag_len;	// Size of Field Tag.  Integer stored as one ASCII byte.  (1 <= length <=7)
+				// Subfield labels are not restricted by this length.
+};
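+/*
+ * For reference, these are the byte offsets at which read_record(), below,
+ * picks the Leader apart.  They are just a restatement of the item lengths
+ * listed in the structure above, with offsets measured from the start of
+ * the record.
+ *
+ *	offset  0,  5 bytes:	Record Length
+ *	offset  5,  1 byte:	Interchange Level
+ *	offset  6,  1 byte:	Leader Identifier
+ *	offset  7,  1 byte:	Inline Code Extension
+ *	offset  8,  1 byte:	Reserved Space
+ *	offset  9,  1 byte:	Application Indicator
+ *	offset 10,  2 bytes:	Field Control Length
+ *	offset 12,  5 bytes:	Base Address of Field Area
+ *	offset 17,  3 bytes:	Code Character Set Indicator
+ *	offset 20,  1 byte:	Size of Field Length
+ *	offset 21,  1 byte:	Size of Field Position
+ *	offset 22,  1 byte:	Reserved Digit
+ *	offset 23,  1 byte:	Size of Field Tag
+ */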
+/*
+ * Following this is the DDR Directory,
+ * whose length is equal to the Base Address of the Field Area
+ * minus the Leader length minus 1.  The minus 1 is for the field terminator,
+ * which immediately follows the Directory.
+ *
+ * The Directory consists of consecutive instances of the triple
+ * (tag, field length, field position), one triple for each tag defined.
+ *
+ * Following the Directory, there is a Field Area, containing information
+ * about each field.  This area contains the Field Control string,
+ * Field Name, subfield labels (similar to tags), and subfield formats.
+ * Although not technically part of the directory, these items are also
+ * included in the ddr_directory structure so that everything is in one place.
+ *
+ * Not all of the extra items will be present for each field.
+ * Rather than do a bunch of malloc/free operations to
+ * allocate appropriate space, we just provide arrays of pre-defined
+ * size, which are hopefully big enough for all files we will encounter.
+ * This is somewhat wasteful, and subtracts from the generality of the
+ * code, but also makes it easier to write and debug the code.
+ * Later, after the code is more seasoned, we can change it to be more
+ * efficient (and more general) if we wish.
+ *
+ * We don't provide storage space for subfield labels and formats.
+ * Instead, we will just provide pointers into the storage buffer
+ * for the entire DDR.
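+ *
+ * To make the Directory layout concrete, here is a hypothetical example
+ * (not taken from a real file), assuming Size of Field Tag = 4, Size of
+ * Field Length = 3, and Size of Field Position = 4.  Two consecutive
+ * Directory entries, for tags "0001" and "ELEV", might look like:
+ *
+ *	"0001" "006" "0000"   "ELEV" "023" "0006"   <FIELD_TERMINATOR>
+ *	  tag   len   pos       tag   len   pos
+ *
+ * meaning that the description of field "ELEV" occupies 23 bytes of the
+ * Field Area, starting 6 bytes into it.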
+ */
+struct ddr_directory  {
+	/*
+	 * Tag "0000" indicates a file-control entry.
+	 *    If Interchange Level is 2 or 3, we need to read 6-byte field control.
+	 * Tag "0001" indicates the DDF Record Identifier.
+	 *    If Interchange Level is 2 or 3, we need to read 6-byte field control.
+	 * Tag "0002" indicates user-augmented file description.
+	 *    No field control, as far as I know.
+	 * Tag "????" indicates user data.
+	 *    If Interchange Level is 2 or 3, we need to read 6-byte field control.
+	 */
+	char *tag;		// Tag.  ASCII bytes.  Its length is specified by the
+				// Size of Field Tag in the record_leader.  I think these are
+				// restricted to 7 bytes, and we include an extra byte for a null.
+	long field_len;		// Field Length.  Integer stored as ASCII bytes.
+				// Its length is specified by the Size of Field Length in the record_leader.
+	long field_pos;		// Field Position.  Integer stored as ASCII bytes.  Its length is
+				// specified by the Size of Field Position in the record_leader.
+	char *field_cntrl;	// Field Control.  ASCII bytes.  Its length is specified by the Field
+				// Control Indicator in the record_leader.
+	char *name;		// The human-friendly name of the field.
+	char *labels[MAX_SUBFIELDS];	// Labels (similar to tags) for subfields.
+	char *formats[MAX_SUBFIELDS];	// Formats for subfields.
+	int  sizes[MAX_SUBFIELDS];	// The sizes from the formats, if they were provided.  (0 if they were not.)
+	char cartesian[MAX_SUBFIELDS];	// Cartesian delimiter flag.  If non-zero, the label had a '*' in front of it.
+	long num_labels;	// The number of user subfield labels in the record.
+	long num_formats;	// The number of user subfield formats in the record.
+};
+
+
+/*
+ * This is a structure for the DDR contents.
+ * It includes space for the Leader and Directory, but the
+ * contents of the Field Area reside in the DDR buffer, ddr_buf.
+ */
+static struct ddr  {
+	struct record_leader record_leader;
+	struct ddr_directory f0000;	// DDR Directory entry for file-control entry
+//	struct ddr_directory f0002;	// DDR Directory entry for user-augmented file description (currently unsupported)
+	struct ddr_directory user[MAX_TAGS]; // DDR Directory entry for user data entry
+	long num_tags;		// Total number of field tags stored in user[]
+} ddr;
+
+
+
+/*
+ * The Data Records also contain directories, but the Field Area
+ * contains actual data, rather than field description information.
+ * Thus we define a directory structure for the DRs that doesn't
+ * contain the extra field description information.
+ */
+struct dr_directory  {
+	char *tag;		// Tag.  ASCII bytes.  Its length is specified by the
+				// Size of Field Tag in the record_leader.  I think these are
+				// restricted to 7 bytes, and we include an extra byte for a null.
+	long field_len;		// Field Length.  Integer stored as ASCII bytes.
+				// Its length is specified by the Size of Field Length in the record_leader.
+	long field_pos;		// Field Position.  Integer stored as ASCII bytes.  Its length is
+				// specified by the Size of Field Position in the record_leader.
+};
+
+
+/*
+ * This is a structure for the DR contents.
+ * It includes space for the Leader and Directory, but the
+ * contents of the Field Area reside in the DR buffer, dr_buf.
+ */
+static struct dr  {
+	struct record_leader record_leader;
+	struct dr_directory user[MAX_TAGS]; // DR Directory entries
+	long num_tags;		// Total number of field tags stored in user[]
+} dr;
+
+
+
+
+/*
+ * Read in an ISO 8211 record.
+ * Fill in the record leader structure.
+ */
+static long
+read_record(struct record_leader *record_leader, char **rec_buf)
+{
+	long i;
+	char tmp[REC_LEN_LEN + 1];
+	char *ptr, *end_ptr;
+	ssize_t ret_val;
+	char save_byte;
+	long long_record_flag = 0;
+	long field_pos, field_len;
+
+	/*
+	 * Read in the record length, which is the first thing in the
+	 * record.
+	 */
+	if ((ret_val = read_function(fdesc, tmp, REC_LEN_LEN)) != REC_LEN_LEN)  {
+		if (ret_val == 0)  {
+			return 0;
+		}
+		else  {
+			fprintf(stderr, "Couldn't read record size from SDTS record.\n");
+			return -1;
+		}
+	}
+	tmp[REC_LEN_LEN] = '\0';
+	record_leader->length = strtol(tmp, (char **)0, 10);
+	if (record_leader->length == 0)  {
+		/*
+		 * Note: If the record length is zero, then that means the
+		 * record is over 100,000 bytes.  The procedure to deal with
+		 * this is to read and parse the Directory and find
+		 * the field length and field position of the last Directory
+		 * entry (the one just before the field terminator).
+		 * Adding these to the base address of the Field Area
+		 * yields the record length.  We will do this after
+		 * we parse the leader.  For now, we just set the record
+		 * length to be long enough for the Leader.
+		 *
+		 * However, before we do this, make sure that
+		 * the record length is actually zero.  There may be
+		 * non-numeric characters where the record length is supposed
+		 * to be.  strtol() will ignore these and return zero, which
+		 * isn't strictly-speaking the correct interpretation of
+		 * a garbage-filled record length.
+		 */
+		for (i = 0; i < REC_LEN_LEN; i++) {
+			/* Check for leading blanks. */
+			if (tmp[i] != ' ') break;
+		}
+		for ( ; i < REC_LEN_LEN; i++)  {
+			/* Check for all zeros, following the blanks. */
+			if (tmp[i] != '0') break;
+		}
+		if (i < REC_LEN_LEN)  {
+			fprintf(stderr, "Warning: Record length is nonsensical.  Assuming end of file.\n");
+			return 0;
+		}
+
+		record_leader->length = REC_LEADER_LEN;
+		long_record_flag = 1;
+	}
+	else if (record_leader->length < REC_LEADER_LEN)  {
+		fprintf(stderr, "Record length is less than %d in SDTS record.\n", REC_LEADER_LEN);
+		return -1;
+	}
+
+	/*
+	 * Get space for the record, copy the already-read record length
+	 * into the beginning of the space, and read the remainder of the record.
+	 * (Or read just the Leader, if this is a long record.)
+	 */
+	if ((*rec_buf = (char *)malloc(record_leader->length + 1)) == (char *)0)  {
+		fprintf(stderr, "malloc(%ld) returns null.\n", record_leader->length + 1);
+		return -1;
+	}
+	for (i = 0; i < REC_LEN_LEN; i++)  {
+		(*rec_buf)[i] = tmp[i];
+	}
+	if ((ret_val = read_function(fdesc, *rec_buf + REC_LEN_LEN, record_leader->length - REC_LEN_LEN)) !=
+									(record_leader->length - REC_LEN_LEN))  {
+			fprintf(stderr, "Couldn't read SDTS record.  Ret_val = %ld.\n", (long)ret_val);
+		free(*rec_buf);
+		return -1;
+	}
+	(*rec_buf)[record_leader->length] = '\0';	// Not really necessary, but I like to do it.
+
+
+	/*
+	 * Parse the record leader and put the results into the
+	 * record_leader structure.
+	 */
+	if ((*rec_buf)[REC_LEN_LEN] == ' ')  {	// The Interchange Level is a single digit, or blank
+		record_leader->ichg_level = -1;
+	}
+	else  {
+		record_leader->ichg_level = (*rec_buf)[REC_LEN_LEN] - '0';
+	}
+	record_leader->leader_id = (*rec_buf)[REC_LEN_LEN + 1];	// Leader Identifier.  A single ASCII byte.  'L' for DDR, 'D' or 'R' for DR.
+	record_leader->ice_ind = (*rec_buf)[REC_LEN_LEN + 2];		// Inline Code Extension.  A single ASCII byte.
+	record_leader->reserved_space = (*rec_buf)[REC_LEN_LEN + 3];	// Reserved Space.  A single ASCII byte.
+	record_leader->application = (*rec_buf)[REC_LEN_LEN + 4];	// Application Indicator.  A single ASCII byte.
+	if ((*rec_buf)[REC_LEN_LEN + 6] == ' ')  {	// Field Control Indicator.  Integer stored as two ASCII bytes.
+		record_leader->field_cntrl_len = -1;
+	}
+	else  {
+		record_leader->field_cntrl_len = ((*rec_buf)[REC_LEN_LEN + 5] - '0') * 10 + (*rec_buf)[REC_LEN_LEN + 6] - '0';
+	}
+	save_byte = (*rec_buf)[REC_LEN_LEN + 12]; (*rec_buf)[REC_LEN_LEN + 12] = '\0';
+	record_leader->fa_addr = strtol(*rec_buf + REC_LEN_LEN + 7, (char **)0, 10);	// Base address of Field Area.  Integer stored as 5 ASCII bytes.
+	(*rec_buf)[REC_LEN_LEN + 12] = save_byte;
+	record_leader->ccs[0] = (*rec_buf)[REC_LEN_LEN + 12];	// Code Character Set Indicator.  Three ASCII bytes.
+	record_leader->ccs[1] = (*rec_buf)[REC_LEN_LEN + 13];
+	record_leader->ccs[2] = (*rec_buf)[REC_LEN_LEN + 14];
+	record_leader->field_len_len = (*rec_buf)[REC_LEN_LEN + 15] - '0';	// Size of Field Length.  Integer stored as one ASCII byte.  (1 <= length <=9)
+	if ((record_leader->field_len_len < 1) || (record_leader->field_len_len > 9))  {
+		free(*rec_buf);
+		fprintf(stderr, "Field length length in record leader (%ld) is out of bounds.\n", record_leader->field_len_len);
+		return -1;
+	}
+	record_leader->field_pos_len = (*rec_buf)[REC_LEN_LEN + 16] - '0';	// Size of Field Position.  Integer stored as one ASCII byte.  (1 <= length <=9)
+	if ((record_leader->field_pos_len < 1) || (record_leader->field_pos_len > 9))  {
+		free(*rec_buf);
+		fprintf(stderr, "Field position length in record leader (%ld) is out of bounds.\n", record_leader->field_pos_len);
+		return -1;
+	}
+	if ((*rec_buf)[REC_LEN_LEN + 17] != ' ')  {
+		record_leader->reserved_digit = (*rec_buf)[REC_LEN_LEN + 17] - '0';	// Reserved Digit.  Integer stored as one ASCII byte.
+	}
+	else  {
+		record_leader->reserved_digit = -1;	// Reserved Digit.  Integer stored as one ASCII byte.
+	}
+	record_leader->field_tag_len = (*rec_buf)[REC_LEN_LEN + 18] - '0';	// Size of Field Tag.  Integer stored as one ASCII byte.  (1 <= length <=7)
+	if ((record_leader->field_tag_len < 1) || (record_leader->field_tag_len > 7))  {
+		free(*rec_buf);
+		fprintf(stderr, "Field tag length in record leader (%ld) is out of bounds.\n", record_leader->field_tag_len);
+		return -1;
+	}
+
+	if (long_record_flag != 0)  {
+		/*
+		 * The record is longer than 100000 bytes.  Get the record the hard way.
+		 *
+		 * Start by re-sizing the record buffer to 100000 bytes, since
+		 * we know the record must be at least that long.
+		 */
+		if ((ptr = (char *)realloc(*rec_buf, 100000)) == (char *)0)  {
+			free(*rec_buf);
+			fprintf(stderr, "realloc(100000) returns null.\n");
+			return -1;
+		}
+		*rec_buf = ptr;
+
+		/*
+		 * Now search after the Leader to find the first FIELD_TERMINATOR.
+		 * This should be the end of the Directory.
+		 */
+		i = REC_LEADER_LEN;
+		while (((ret_val = read_function(fdesc, *rec_buf + i, 1)) == 1) && ((*rec_buf)[i] != FIELD_TERMINATOR))  {
+			i++;
+			if (i == 100000)  {
+				fprintf(stderr, "Failed to find end of Directory in first 100000 bytes.  This seems implausible.  Giving up.\n");
+				free(*rec_buf);
+				return -1;
+			}
+		}
+		if (ret_val != 1)  {
+			fprintf(stderr, "Couldn't read SDTS record.  Ret_val = %ld.\n", (long)ret_val);
+			free(*rec_buf);
+			return -1;
+		}
+
+		/*
+		 * If we make it to this point, then i should be the index of the Directory FIELD_TERMINATOR.
+		 * We need to backtrack from here to determine the Field Length and
+		 * Field Position of the last Directory entry.
+		 */
+		save_byte = (*rec_buf)[i]; (*rec_buf)[i] = '\0';
+		field_pos = strtol(*rec_buf + i - record_leader->field_pos_len, (char **)0, 10);
+		(*rec_buf)[i] = save_byte;
+		save_byte = (*rec_buf)[i - record_leader->field_pos_len]; (*rec_buf)[i - record_leader->field_pos_len] = '\0';
+		field_len = strtol(*rec_buf + i - record_leader->field_pos_len - record_leader->field_len_len, (char **)0, 10);
+		(*rec_buf)[i - record_leader->field_pos_len] = save_byte;
+
+		/*
+		 * Now we have what we need to figure out the
+		 * record length.  Figure it out and then re-size the
+		 * buffer accordingly.
+		 */
+		record_leader->length = record_leader->fa_addr + field_pos + field_len;
+		if ((ptr = (char *)realloc(*rec_buf, record_leader->length + 1)) == (char *)0)  {
+			free(*rec_buf);
+			fprintf(stderr, "realloc(%ld) returns null.\n", record_leader->length + 1);
+			return -1;
+		}
+		*rec_buf = ptr;
+
+		/*
+		 * Now, at last, we are ready to read in the remainder of the record.
+		 * The remainder consists of the Field Area, since we have already read in
+		 * the Leader and the Directory.
+		 */
+		if ((ret_val = read_function(fdesc, *rec_buf + i + 1, record_leader->length - i - 1)) !=
+									(record_leader->length - i - 1))  {
+			fprintf(stderr, "Couldn't read SDTS record.  Ret_val = %ld.\n", (long)ret_val);
+			free(*rec_buf);
+			return -1;
+		}
+	}
+	(*rec_buf)[record_leader->length] = '\0';	// Not really necessary, but I like to do it.
+
+	return record_leader->length;
+}
+
+
+
+
+/*
+ * For testing purposes, print an SDTS DDR record structure.
+ */
+void
+print_ddr()
+{
+	long i;
+	long j;
+
+	fprintf(stderr, "ddr.record_leader.length = %ld\n", ddr.record_leader.length);
+	fprintf(stderr, "ddr.record_leader.ichg_level = %ld\n", ddr.record_leader.ichg_level);
+	if (ddr.record_leader.ichg_level >= 0)  {
+		if ((ddr.record_leader.ichg_level < 1) || (ddr.record_leader.ichg_level > 3))  {
+			fprintf(stderr, "Bad interchange level in DDR = %ld.\n", ddr.record_leader.ichg_level);
+			exit(0);
+		}
+	}
+	fprintf(stderr, "ddr.record_leader.leader_id = \"%c\"\n", ddr.record_leader.leader_id);
+	fprintf(stderr, "ddr.record_leader.ice_ind = \"%c\"\n", ddr.record_leader.ice_ind);
+	fprintf(stderr, "ddr.record_leader.reserved_space = \"%c\"\n", ddr.record_leader.reserved_space);
+	fprintf(stderr, "ddr.record_leader.application = \"%c\"\n", ddr.record_leader.application);
+	fprintf(stderr, "ddr.record_leader.field_cntrl_len = %ld\n", ddr.record_leader.field_cntrl_len);
+	if (ddr.record_leader.ichg_level >= 0)  {
+		if (((ddr.record_leader.ichg_level == 1) && (ddr.record_leader.field_cntrl_len != 0)) ||
+		    ((ddr.record_leader.ichg_level == 2) && (ddr.record_leader.field_cntrl_len != 6)) ||
+		    ((ddr.record_leader.ichg_level == 3) && (ddr.record_leader.field_cntrl_len != 6)))  {
+			fprintf(stderr, "Bad field control length in DDR = %ld.\n", ddr.record_leader.field_cntrl_len);
+			exit(0);
+		}
+	}
+	fprintf(stderr, "ddr.record_leader.fa_addr = %ld\n", ddr.record_leader.fa_addr);
+	if (ddr.record_leader.fa_addr < REC_LEADER_LEN)  {
+		fprintf(stderr, "Bad DDA address in DDR = %ld.\n", ddr.record_leader.fa_addr);
+		exit(0);
+	}
+	fprintf(stderr, "ddr.record_leader.ccs = \"%c%c%c\"\n", ddr.record_leader.ccs[0], ddr.record_leader.ccs[1], ddr.record_leader.ccs[2]);
+	fprintf(stderr, "ddr.record_leader.field_len_len = %ld\n", ddr.record_leader.field_len_len);
+	fprintf(stderr, "ddr.record_leader.field_pos_len = %ld\n", ddr.record_leader.field_pos_len);
+	fprintf(stderr, "ddr.record_leader.reserved_digit = %ld\n", ddr.record_leader.reserved_digit);
+	fprintf(stderr, "ddr.record_leader.field_tag_len = %ld\n", ddr.record_leader.field_tag_len);
+	fprintf(stderr, "\n");
+
+	if (ddr.f0000.tag == (char *)0)  {
+		fprintf(stderr, "ddr.f0000 did not appear\n");
+	}
+	else  {
+		fprintf(stderr, "ddr.f0000.tag = \"%.*s\"\n", (int)ddr.record_leader.field_tag_len, ddr.f0000.tag);
+		fprintf(stderr, "ddr.f0000.field_len = %ld\n", ddr.f0000.field_len);
+		fprintf(stderr, "ddr.f0000.field_pos = %ld\n", ddr.f0000.field_pos);
+		fprintf(stderr, "ddr.f0000.field_cntrl = \"%.*s\"\n", ddr.record_leader.ichg_level > 1 ? 6 : 0, ddr.f0000.field_cntrl);
+		fprintf(stderr, "ddr.f0000.name = \"%s\"\n", ddr.f0000.name);
+	}
+	fprintf(stderr, "\n");
+
+	for (i = 0; i < ddr.num_tags; i++)  {
+		fprintf(stderr, "ddr.user[%ld].tag = \"%.*s\"\n", i, (int)ddr.record_leader.field_tag_len, ddr.user[i].tag);
+		fprintf(stderr, "ddr.user[%ld].field_len = %ld\n", i, ddr.user[i].field_len);
+		fprintf(stderr, "ddr.user[%ld].field_pos = %ld\n", i, ddr.user[i].field_pos);
+		fprintf(stderr, "ddr.user[%ld].field_cntrl = \"%.*s\"\n", i, ddr.record_leader.ichg_level > 1 ? 6 : 0, ddr.user[i].field_cntrl);
+		fprintf(stderr, "ddr.user[%ld].name = \"%s\"\n", i, ddr.user[i].name);
+
+		for (j = 0; j < ddr.user[i].num_labels; j++)  {
+			fprintf(stderr, "ddr.user[%ld].labels[%ld] = \"%s\"\n", i, j, ddr.user[i].labels[j]);
+			fprintf(stderr, "ddr.user[%ld].formats[%ld] = \"%s\"\n", i, j, ddr.user[i].formats[j]);
+		}
+
+		fprintf(stderr, "\n");
+	}
+}
+
+
+
+
+
+/*
+ * Parse the DDR record and put all of the
+ * information into the DDR structure.
+ *
+ * We have already parsed the record leader, in the read_record() function.
+ * The values have been stored away in ddr.record_leader.
+ * This was done separately because the leader always has the same
+ * interpretation for every record, and we can parse and check it
+ * right after reading any record.
+ *
+ * Now we need to parse the rest of the DDR.  The DDR is a collection
+ * of fields that describe the fields and subfields in the following
+ * Data Records (DRs).
+ *
+ * We will be concerned with the two remaining (non-leader) portions of the DDR:
+ * the Directory and the Field Area.
+ *
+ * The Directory immediately follows the leader, without any intervening
+ * field terminator, and is followed by a field terminator (which I guess
+ * makes it the first field in the DDR).  The directory contains consecutive
+ * entries for each field description in the DDR.  Each entry consists of a field tag,
+ * followed by the field length (which is not the length of the corresponding field in
+ * the DR, but rather the length of the field (in the Field Area) that describes how
+ * the field will appear in the DR), followed by the field position
+ * (which is the offset of the field description from the start of the Field Area).
+ * The lengths (in the Directory) of each of these three items are specified in the leader.
+ * Thus, we read the leader to find the length of the "field length" in the Directory.
+ * We read the Directory to find the actual field length (which is not the length of the
+ * corresponding field in the DRs, but rather the length of the field in the Field Area
+ * that describes the DR field).
+ *
+ * Given the information from the Directory, we then parse the corresponding fields in
+ * the Field Area.  In general, the format of these appears to be:  a format control
+ * field (whose length is specified in the leader), a field name (which is the
+ * human-friendly name of the corresponding field ---  and is not the same as
+ * the tag of the corresponding field), a list of subfield labels, and a set
+ * of subfield format specifiers.  The latter two items only seem to appear for
+ * user-defined fields, and not for the special fields with tags "0000",
+ * "0001", "0002", and so on.  (I'm not so sure about field types "0002" and above,
+ * since I haven't come across any examples yet.)
+ *
+ * We need to read all of these things, and store them away in appropriate
+ * parts of the ddr structure.
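+ *
+ * As a concrete (hypothetical) illustration, the Field Area description of
+ * one user field, at Interchange Level 2 or 3, might look something like:
+ *
+ *	"16----" "EXAMPLE FIELD NAME" <UT> "MODN!RCID" <UT> "(A(4),I(6))" <FT>
+ *
+ * where "16----" stands for the six-byte field control (only the leading
+ * bytes are interpreted by this code), <UT> and <FT> are the UNIT_TERMINATOR
+ * and FIELD_TERMINATOR, "MODN!RCID" is a pair of subfield labels separated
+ * by the '!' vector delimiter, and "(A(4),I(6))" gives their formats.  The
+ * parsing below would record the two labels with sizes 4 and 6.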
+ */
+static void
+parse_ddr()
+{
+	long i;		// We use this as an index into the Directory
+	long j;		// We use this as an index into the Field Area
+	long k;
+	long size;
+	long repeat_count;
+	char save_byte;
+	char *ptr, *end_ptr;
+	ssize_t ret_val;
+
+	if ((ret_val = read_record(&(ddr.record_leader), &ddr_buf)) <= 0)  {
+		/* If ret_val is < 0, read_record() already printed an error message. */
+		if (ret_val == 0)  {
+			fprintf(stderr, "At end of file, reading DDR.  This should not happen.\n");
+		}
+		exit(0);
+	}
+	if (ddr.record_leader.leader_id != 'L')  {
+		fprintf(stderr, "DDR Leader ID is '%c'.  Can't handle this.\n", ddr.record_leader.leader_id);
+		exit(0);
+	}
+	/*
+	 * Should probably check, at this point, that the Interchange Level is not 3,
+	 * since we can't handle such files in general.  However, it
+	 * is possible that we can still parse them well enough to read
+	 * an SDTS file, so we will take the chance and try handling
+	 * level 3 files anyway.
+	 */
+
+	i = REC_LEADER_LEN;			// Start of DDR Directory
+	j = ddr.record_leader.fa_addr;	// Start of Field Area
+
+	/* Initialize so that we know nothing is initially present. */
+	ddr.f0000.tag = (char *)0;
+	ddr.f0000.name = (char *)0;
+	ddr.f0000.field_cntrl = (char *)0;
+	ddr.f0000.num_labels = 0;
+	ddr.num_tags = 0;
+
+	/*
+	 * First we examine the field tag to find out if it is one of the special
+	 * types of fields.  The special tags appear to be strings of all '0'
+	 * characters, with a '0', '1', or '2' on the end.  Because the whole tag,
+	 * except for the last digit, must be filled with '0' characters (at least
+	 * as far as I know, without a copy of the standard), we play some pointer games
+	 * and index into the comparison strings to get comparison strings of the
+	 * correct length.  This gives us the odd-looking construct:
+	 * "0000002" + 7 - ddr.record_leader.field_tag_len
+	 * If the tag length is 4, then we are really comparing against "0002".
+	 */
+	while (i < (ddr.record_leader.fa_addr - 1))  {
+		if (strncmp(&ddr_buf[i], "0000000", ddr.record_leader.field_tag_len) == 0)  {
+			/* This is a file-control tag. */
+			ddr.f0000.tag = &ddr_buf[i];
+			i = i + ddr.record_leader.field_tag_len;
+
+			k = i;
+			save_byte = ddr_buf[i + ddr.record_leader.field_len_len];
+			ddr_buf[i + ddr.record_leader.field_len_len] = '\0';
+			ddr.f0000.field_len = strtol(&ddr_buf[i], (char **)0, 10);
+			ddr_buf[i + ddr.record_leader.field_len_len] = save_byte;
+			i = i + ddr.record_leader.field_len_len;
+			ddr_buf[k] = '\0';	// Null-terminate tag.
+
+			save_byte = ddr_buf[i + ddr.record_leader.field_pos_len];
+			ddr_buf[i + ddr.record_leader.field_pos_len] = '\0';
+			ddr.f0000.field_pos = strtol(&ddr_buf[i], (char **)0, 10);
+			ddr_buf[i + ddr.record_leader.field_pos_len] = save_byte;
+			i = i + ddr.record_leader.field_pos_len;
+
+			if ((ddr.record_leader.ichg_level == 2) || (ddr.record_leader.ichg_level == 3))  {
+				ddr.f0000.field_cntrl = &ddr_buf[j];
+				j = j + ddr.record_leader.field_cntrl_len;
+			}
+			else  {
+				ddr.f0000.field_cntrl = (char *)0;
+			}
+
+			ddr.f0000.name = &ddr_buf[j];
+			k = ddr.record_leader.fa_addr + ddr.f0000.field_pos + ddr.f0000.field_len;
+			for ( ; j < k; j++)  {	// Search the whole field, if necessary, for a terminator.
+				if ((ddr_buf[j] == UNIT_TERMINATOR) || (ddr_buf[j] == FIELD_TERMINATOR))  {
+					break;
+				}
+			}
+			if (j == k)  {
+				fprintf(stderr, "The file appears defective.  Can't proceed.\n");
+				exit(0);
+			}
+			ddr_buf[j++] = '\0';	// Null terminate the end of the name.  The null-terminator may end up being the whole name if no name was present.
+			j = k;
+		}
+		else if (strncmp(&ddr_buf[i], "0000002" + 7 - ddr.record_leader.field_tag_len, ddr.record_leader.field_tag_len) == 0)  {
+			/* This is a user-augmented file description.  We don't know how to handle these. */
+			fprintf(stderr, "File contains field tag of \"0..2\".  Can't handle this.\n");
+			exit(0);
+		}
+		else if ((strncmp(&ddr_buf[i], "0000000" + 7 - ddr.record_leader.field_tag_len, ddr.record_leader.field_tag_len - 1) == 0) &&
+			 (ddr_buf[i + ddr.record_leader.field_tag_len - 1] >= '3') &&
+			 (ddr_buf[i + ddr.record_leader.field_tag_len - 1] <= '9'))  {
+			/* This is one of the other special tags that we can't handle. */
+			fprintf(stderr, "File contains field tag of \"0..%c\".  Can't handle this.\n", ddr_buf[i + ddr.record_leader.field_tag_len - 1]);
+			exit(0);
+		}
+		else  {
+			/* This is a plain old non-special tag.  (Which includes the pseudo-special "0..1" tag.) */
+			if (ddr.num_tags == MAX_TAGS)  {
+				fprintf(stderr, "Ran out of space for field tags.  Can't proceed.\n");
+				exit(0);
+			}
+
+			/*
+			 * Initialize the various subfield storage spaces, in
+			 * case we don't find any labels and/or formats.
+			 */
+			for (k = 0; k < MAX_SUBFIELDS; k++)  {
+				ddr.user[ddr.num_tags].labels[k] = "";
+				ddr.user[ddr.num_tags].formats[k] = "";
+				ddr.user[ddr.num_tags].sizes[k] = 0;
+				ddr.user[ddr.num_tags].cartesian[k] = 0;
+			}
+
+			ddr.user[ddr.num_tags].tag = &ddr_buf[i];
+			i = i + ddr.record_leader.field_tag_len;
+
+			k = i;
+			save_byte = ddr_buf[i + ddr.record_leader.field_len_len];
+			ddr_buf[i + ddr.record_leader.field_len_len] = '\0';
+			ddr.user[ddr.num_tags].field_len = strtol(&ddr_buf[i], (char **)0, 10);
+			ddr_buf[i + ddr.record_leader.field_len_len] = save_byte;
+			i = i + ddr.record_leader.field_len_len;
+			ddr_buf[k] = '\0';	// Null-terminate tag.
+
+			save_byte = ddr_buf[i + ddr.record_leader.field_pos_len];
+			ddr_buf[i + ddr.record_leader.field_pos_len] = '\0';
+			ddr.user[ddr.num_tags].field_pos = strtol(&ddr_buf[i], (char **)0, 10);
+			ddr_buf[i + ddr.record_leader.field_pos_len] = save_byte;
+			i = i + ddr.record_leader.field_pos_len;
+
+			if ((ddr.record_leader.ichg_level == 2) || (ddr.record_leader.ichg_level == 3))  {
+				ddr.user[ddr.num_tags].field_cntrl = &ddr_buf[j];
+				j = j + ddr.record_leader.field_cntrl_len;
+			}
+			else  {
+				ddr.user[ddr.num_tags].field_cntrl = (char *)0;
+			}
+
+			ddr.user[ddr.num_tags].name = &ddr_buf[j];
+			k = ddr.record_leader.fa_addr + ddr.user[ddr.num_tags].field_pos + ddr.user[ddr.num_tags].field_len;
+			for ( ; j < k; j++)  {	// Search the whole field, if necessary, for a terminator.
+				if ((ddr_buf[j] == UNIT_TERMINATOR) || (ddr_buf[j] == FIELD_TERMINATOR))  {
+					break;
+				}
+			}
+			if (j == k)  {
+				fprintf(stderr, "The file appears defective.  Can't proceed.\n");
+				exit(0);
+			}
+			if ((ddr_buf[j] != UNIT_TERMINATOR) ||
+			    (ddr.record_leader.ichg_level == 1) ||
+			    (ddr.user[ddr.num_tags].field_cntrl[0] == '0'))  {
+				ddr_buf[j++] = '\0';	// Null terminate the end of the name.
+				ddr.num_tags++;
+				j = k;
+				continue;
+			}
+			ddr_buf[j++] = '\0';	// Null terminate the end of the name.
+
+			/*
+			 * If there are any subfield labels, we need to parse them.
+			 * They are normally separated by the '!' character, which is the vector
+			 * delimiter.  We null out the '!' characters so that the labels
+			 * will be null-terminated.
+			 *
+			 * We may also come across the '*' character, which is the
+			 * cartesian delimiter.  If we come across the latter, we remember
+			 * it for later use, and then null it just like the '!' delimiters.
+			 */
+			ddr.user[ddr.num_tags].num_labels = 0;
+			if (ddr_buf[j] != UNIT_TERMINATOR)  {
+				/*
+				 * Either we have some labels, or we are at end of field.
+				 * If we have labels, we locate them.
+				 * If we are at end-of-field, we fall through the loop.
+				 */
+
+				/* Check for leading '*' delimiter */
+				if ((j != k) && (ddr_buf[j] == '*'))  {
+					j++;
+					ddr.user[ddr.num_tags].cartesian[0] = 1;
+				}
+
+				/* Now, process the labels, if there are any. */
+				while ((j != k) && (ddr_buf[j] != UNIT_TERMINATOR) && (ddr_buf[j] != FIELD_TERMINATOR))  {
+					if (ddr.user[ddr.num_tags].num_labels == MAX_SUBFIELDS)  {
+						fprintf(stderr, "Ran out of space for subfield labels.  Can't proceed.\n");
+						exit(0);
+					}
+
+					ddr.user[ddr.num_tags].labels[ddr.user[ddr.num_tags].num_labels] = &ddr_buf[j];
+					j++;
+					for ( ; j < (k - 1); j++)  {
+						if ((ddr_buf[j] == '!') || (ddr_buf[j] == '*') ||
+						    (ddr_buf[j] == UNIT_TERMINATOR) || (ddr_buf[j] == FIELD_TERMINATOR))  {
+							break;
+						}
+					}
+					if (ddr_buf[j] == '*')  {
+						if (ddr.user[ddr.num_tags].num_labels == (MAX_SUBFIELDS - 1))  {
+							fprintf(stderr, "Ran out of space for subfield labels.  Can't proceed.\n");
+							exit(0);
+						}
+						ddr.user[ddr.num_tags].cartesian[ddr.user[ddr.num_tags].num_labels + 1] = 1;
+					}
+					if (ddr_buf[j] != '!')  {
+						ddr_buf[j++] = '\0';	// Null terminate the end of the tag.
+						ddr.user[ddr.num_tags].num_labels++;
+						break;
+					}
+					ddr_buf[j++] = '\0';	// Null terminate the end of the tag.
+					ddr.user[ddr.num_tags].num_labels++;
+				}
+			}
+			else  {
+				j++;
+			}
+
+			/*
+			 * If there are any format specifiers, we need to parse them.
+			 * They are delimited by '(' and ')', and separated by ',' characters.
+			 * They might look something like "(A,I,B,3I)" or "(A(3),I(4))".
+			 * The leading 3 is a repeat count, and the numbers in parentheses are
+			 * subfield sizes.
+			 *
+			 * Note that the format strings can be vastly more complex than what
+			 * we can handle here.  (They can include constructs like "(A(4),3(I(2),I(3)))"; and
+			 * "(A(,),I(5))" (where the "A(,)" is a comma-delimited ASCII string); and even
+			 * more complex forms.)  We provide just enough functionality to handle
+			 * known USGS data.  If it becomes necessary to handle more complex format
+			 * strings, we should probably move the parsing to a separate function.
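+			 *
+			 * For example (a made-up case), a format string of "(A(3),2I(6))"
+			 * would be recorded below as three subfield formats:  one A(3)
+			 * with size 3, followed by two I(6) entries, each with size 6.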
+			 */
+			ddr.user[ddr.num_tags].num_formats = 0;
+			if (ddr_buf[j] != FIELD_TERMINATOR)  {
+				if ((k - j) > 3)  {	// We need at least "(?)" followed by a field terminator before we can proceed.
+					if ((ddr_buf[j] != '(') || (ddr_buf[k - 2] != ')') || (ddr_buf[k - 1] != FIELD_TERMINATOR))  {
+						fprintf(stderr, "Subfield format specification looks wrong.  Can't proceed.\n");
+						exit(0);
+					}
+					j++;
+					ddr_buf[k - 2] = '\0';
+					ddr_buf[k - 1] = '\0';
+
+					/*
+					 * Sometimes the () are double nested.  Haven't figured
+					 * out why yet, but it seems harmless to strip off the extra pair.
+					 */
+					if ((ddr_buf[j] == '(') && (ddr_buf[k - 3] == ')'))  {
+						j++;
+						ddr_buf[k - 3] = '\0';
+					}
+
+					while ((j < (k - 2)) && (ddr_buf[j] != '\0'))  {
+						repeat_count = 1;
+						if ((ddr_buf[j] >= '0') && (ddr_buf[j] <= '9'))  {
+							repeat_count = strtol(&ddr_buf[j], &end_ptr, 10);
+							j = j + end_ptr - &ddr_buf[j];
+						}
+						if (ddr_buf[j] == '\0')  {
+							fprintf(stderr, "Subfield format specification looks wrong.  Can't proceed.\n");
+							exit(0);
+						}
+
+						/* Find the subfield size, if there is one. */
+						if ((ptr = strchr(&ddr_buf[j], '(')) == (char *)0)  {
+							size = 0;
+						}
+						else  {
+							size = strtol(ptr + 1, (char **)0, 10);
+							if ((ddr_buf[j] == 'B') || (ddr_buf[j] == 'b'))  {	// Don't know if 'b' ever gets used. Check just in case.
+								/*
+								 * If binary format, convert bit size into byte size.
+								 * Bit fields whose size is not divisible by 8 are
+								 * allowed by the standard, but we can't handle them.
+								 * We assume all bit fields are simply 16-bit or
+								 * 32-bit numbers stored in binary format.
+								 */
+								if (size & 0x7)  {
+									fprintf(stderr, "Subfield size (%ld) is not divisible by eight.\nThe standard allows this, but drawmap can't handle it.\n", size);
+									exit(0);
+								}
+								size = size >> 3;
+							}
+							if (size < 0)  {
+								fprintf(stderr, "Subfield size (%ld) is unusable.\n", size);
+								exit(0);
+							}
+							if (size == 0)  {
+								/*
+								 * Note:  If size == 0, then there was something non-numeric
+								 * inside the parentheses.  We might want to check for
+								 * delimited strings at this point.  I don't want to
+								 * make the format parsing more complicated, though, until
+								 * I know exactly how the format is specified in the
+								 * standard.
+								 */
+								fprintf(stderr, "Warning: Subfield format string %s is unusual.  May cause trouble.\n", &ddr_buf[j]);
+							}
+						}
+
+						while(repeat_count > 0)  {
+							ddr.user[ddr.num_tags].formats[ddr.user[ddr.num_tags].num_formats] = &ddr_buf[j];
+							ddr.user[ddr.num_tags].sizes[ddr.user[ddr.num_tags].num_formats++] = size;
+							repeat_count--;
+						}
+
+						while ((ddr_buf[j] != ',') && (ddr_buf[j] != '\0'))  {
+							j++;
+						}
+						ddr_buf[j++] = '\0';
+					}
+				}
+			}
+
+			/*
+			 * It is okay for the labels and/or formats to be missing.
+			 * However, if they are both present, we insist that there be
+			 * an equal number of each, because otherwise, we don't know
+			 * what to do.
+			 */
+			if ((ddr.user[ddr.num_tags].num_formats > 0) && (ddr.user[ddr.num_tags].num_labels > 0))  {
+				if (ddr.user[ddr.num_tags].num_formats != ddr.user[ddr.num_tags].num_labels)  {
+					fprintf(stderr, "File does not contain a format descriptor for each subfield.  Can't handle this.\n");
+					exit(0);
+				}
+			}
+
+			ddr.num_tags++;
+			j = k;
+		}
+	}
+}
+
+
+
+
+
+/*
+ * Parse the DR record and put all of the
+ * information into the DR structure.
+ *
+ * We have already parsed the record leader, in the read_record() function.
+ * The values have been stored away in dr.record_leader.
+ * This was done separately because the leader always has the same
+ * interpretation for every record, and we can parse and check it
+ * right after reading any record.
+ *
+ * Now we need to parse the rest of the DR.  The DR is similar in structure
+ * to the DDR.  There is again a Directory, right after the Leader, that gives a
+ * (tag, field length, field position) triple for each data field present.
+ * Following this is the Field Area, which contains the actual data.
+ *
+ * Parsing is quite similar to parsing the DDR.  The Directory is parsed
+ * in exactly the same way.  The Field Area is a simple listing of data,
+ * corresponding to the subfields of each of the fields.
+ */
+static void
+parse_dr()
+{
+	long i;		// We use this as an index into the Directory
+	long j;		// We use this as an index into the Field Area
+	long k;
+	char save_byte;
+
+	if (leaderless_flag != 0)  {
+		/*
+		 * Once the leaderless_flag has been set, we shouldn't be calling this function.
+		 */
+		fprintf(stderr, "parse_dr() called during leaderless processing.  Something is wrong.\n");
+		exit(0);
+	}
+
+	j = dr.record_leader.fa_addr;	// Start of Field Area
+
+	dr.num_tags = 0;
+
+
+	/*
+	 * Iterate through the directory entries and stick the data
+	 * into the dr structure.
+	 */
+	i = REC_LEADER_LEN;			// Start of DR Directory
+
+	while (i < (dr.record_leader.fa_addr - 1))  {
+		if (dr.num_tags == MAX_TAGS)  {
+			fprintf(stderr, "Ran out of space for field tags.  Can't proceed.\n");
+			exit(0);
+		}
+
+		dr.user[dr.num_tags].tag = &dr_buf[i];
+		i = i + dr.record_leader.field_tag_len;
+
+		k = i;
+		save_byte = dr_buf[i + dr.record_leader.field_len_len];
+		dr_buf[i + dr.record_leader.field_len_len] = '\0';
+		dr.user[dr.num_tags].field_len = strtol(&dr_buf[i], (char **)0, 10);
+		dr_buf[i + dr.record_leader.field_len_len] = save_byte;
+		i = i + dr.record_leader.field_len_len;
+		dr_buf[k] = '\0';	// Null-terminate tag.
+
+		save_byte = dr_buf[i + dr.record_leader.field_pos_len];
+		dr_buf[i + dr.record_leader.field_pos_len] = '\0';
+		dr.user[dr.num_tags].field_pos = strtol(&dr_buf[i], (char **)0, 10);
+		dr_buf[i + dr.record_leader.field_pos_len] = save_byte;
+		i = i + dr.record_leader.field_pos_len;
+
+		dr.num_tags++;
+		j = k;
+	}
+
+	/*
+	 * If a record has a Leader ID of 'R' instead of 'D',
+	 * then this is the last Record Leader and Directory
+	 * in the file.  From this point on, we just keep reading
+	 * the Field Area over and over again, until we reach the
+	 * end of file, and we interpret it using the Directory
+	 * entry in the just-parsed Directory.
+	 */
+	if (dr.record_leader.leader_id == 'R')  {
+		leaderless_flag = 1;
+	}
+}
+
+
+
+
+/*
+ * When the user calls this function,
+ * we return the next available subfield from the
+ * file.  This routine depends on global
+ * static state information, since it must
+ * remember its state from one invocation to the next.
+ *
+ * This function returns 1 when it finds a subfield.
+ * It returns 0 at end of file.
+ * It exits on errors.
+ *
+ * In the subfield structure returned by this function,
+ * the subfield.tag, subfield.label, and subfield.format
+ * elements will be null-terminated.  The subfield.value
+ * element will not be null-terminated, and you must
+ * use the subfield.length element to find its end.
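+ *
+ * A minimal usage sketch (the file name is hypothetical, error handling is
+ * omitted, and struct subfield is assumed to be the one declared in
+ * sdts_utils.h):
+ *
+ *	struct subfield sf;
+ *
+ *	if (begin_ddf("HY01AHDR.DDF") >= 0)  {
+ *		while (get_subfield(&sf) > 0)  {
+ *			fprintf(stdout, "%s %s %.*s\n", sf.tag, sf.label, (int)sf.length, sf.value);
+ *		}
+ *		end_ddf();
+ *	}
+ *
+ * (Binary subfields would need the kind of byte-swabbing done in the
+ * commented-out test program at the end of this file.)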
+ */
+long
+get_subfield(struct subfield *subfield)
+{
+	ssize_t ret_val;
+	long i;
+	static long data_index;
+	long ddr_index;
+	long field_limit;
+	char *tag_wanted;
+	char *ptr;
+	long max_labels_formats;	// contains the maximum of the number of labels or the number of formats
+
+	/*
+	 * Check whether we have used up all of the data from the last record we
+	 * read.  If so, try to read another record.
+	 */
+	if (dr_tag >= dr.num_tags)  {
+		/* We have finished with the old record and need to read another. */
+		if (leaderless_flag == 0)  {
+			if ((ret_val = read_record(&dr.record_leader, &dr_buf)) < 0)  {
+				/* Error message was printed by read_record(), so just exit. */
+				exit(0);
+			}
+			else if (ret_val == 0)  {
+				return 0;
+			}
+
+			parse_dr();
+		}
+		else  {
+			if ((ret_val = read_function(fdesc, &dr_buf[dr.record_leader.fa_addr],
+			     dr.record_leader.length - dr.record_leader.fa_addr)) !=
+			     (dr.record_leader.length - dr.record_leader.fa_addr))  {
+				if (ret_val == 0)  {
+					return 0;
+				}
+				else  {
+					fprintf(stderr, "Tried to read %ld bytes from SDTS record.  Got ret_val = %ld\n",
+						dr.record_leader.length - dr.record_leader.fa_addr, (long)ret_val);
+					exit(0);
+				}
+			}
+		}
+
+		dr_tag = 0;
+		dr_label = 0;
+		data_index = dr.record_leader.fa_addr;
+	}
+
+	/*
+	 * We are trying to pry the next tag/label pair out of the record.
+	 * Set a pointer to the tag we are looking for, and then search
+	 * for that tag in the DDR.
+	 */
+	tag_wanted = dr.user[dr_tag].tag;
+	for (ddr_index = 0; ddr_index < ddr.num_tags; ddr_index++)  {
+		if (strncmp(tag_wanted, ddr.user[ddr_index].tag, ddr.record_leader.field_tag_len) == 0)  {
+			break;
+		}
+	}
+	if (ddr_index == ddr.num_tags)  {
+		fprintf(stderr, "Failed to find user tag %.*s in DDR.\n", (int)ddr.record_leader.field_tag_len, tag_wanted);
+		exit(0);
+	}
+
+	/*
+	 * Handle the data based on its type.  This particular
+	 * bunch of code is probably not anywhere near as
+	 * complex as a full implementation of the standard
+	 * would require.  Thus, if the program fails to parse a file,
+	 * this block of code may need to be beefed up.
+	 *
+	 * The first byte of the format control string gives the structure type:
+	 *   0 = Elementary Data	(A single data item per field)
+	 *   1 = Vector Data		(Multiple data items per field.  One dimensional.)
+	 *   2 = Array Data		(Multiple data items per field.  Two dimensional.)
+	 *
+	 * The second byte of the format control string gives the data type:
+	 *   0 = Character (Simple character data:  ABC)
+	 *   1 = Implicit point	(Numeric string with no explicit decimal point:  123)
+	 *   2 = Explicit point (Numeric string with an explicit decimal point:  1.23)
+	 *   3 = Explicit point scales (Numeric string with scale factor:  1.23E+04)
+	 *   4 = Character mode bit string (binary bits: 01011101)
+	 *   5 = Bit field (Similar to character mode bit string, but data is actual binary bit string)
+	 *   6 = Mixed data types
+	 *   7 = Haven't ever seen any above 6
+	 *
+	 * At this level of abstraction, we don't care much about the second byte.
+	 * The interpretation of the data in the subfields is done at a higher level.
+	 * However, the first byte adds some complications.  Structure types 0 and
+	 * 1 are fairly straightforward, type 2 can take a variety of forms.
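+	 *
+	 * For example (hypothetical values):  a field control beginning with
+	 * "00" would be a single character-data item, one beginning with "16"
+	 * would be a vector of mixed-type subfields, and one beginning with
+	 * "25" would be an array of binary bit-field data.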
+	 */
+	if ((ddr.record_leader.ichg_level == -1) || (ddr.record_leader.ichg_level == 1) || (ddr.user[ddr_index].field_cntrl[0] == '0'))  {
+		/*
+		 * We have a simple atomic data field, with no subfield label.
+		 */
+		subfield->tag = dr.user[dr_tag].tag;
+		subfield->label = "";
+		subfield->value = dr_buf + data_index;
+		subfield->format = "";
+		subfield->length = dr.user[dr_tag].field_len - 1;	// Subtract 1 for the terminator
+
+		data_index = data_index + dr.user[dr_tag].field_len;
+		dr_buf[data_index - 1] = '\0';
+
+		dr_label++;
+		if (dr_label >= ddr.user[ddr_index].num_labels)  {
+			dr_label = 0;
+			dr_tag++;
+		}
+	}
+	else if (ddr.user[ddr_index].field_cntrl[0] == '1')  {
+		/*
+		 * We have a vector of subfields, which may be of various types, each with its own label.
+		 *
+		 * The complications here arise when labels and/or formats
+		 * are not present.
+		 */
+		subfield->tag = dr.user[dr_tag].tag;
+		subfield->label = ddr.user[ddr_index].labels[dr_label];
+		subfield->value = dr_buf + data_index;
+		subfield->format = ddr.user[ddr_index].formats[dr_label];
+
+		field_limit = dr.record_leader.fa_addr + dr.user[dr_tag].field_pos + dr.user[dr_tag].field_len;
+
+		if (ddr.user[ddr_index].sizes[dr_label] > 0)  {
+			/*
+			 * A size was provided in the format string.  Use it.
+			 * There shouldn't be any UNIT_TERMINATORS between subfields.
+			 */
+			subfield->length = ddr.user[ddr_index].sizes[dr_label];
+			data_index = data_index + subfield->length;
+			if (data_index == (field_limit - 1))  {
+				/* If at end of field, step over FIELD_TERMINATOR */
+				data_index++;
+			}
+		}
+		else  {
+			/*
+			 * No size was provided in the format string.  (Or there was no format string.)
+			 * Must find the end of the subfield via the terminator.
+			 */
+			for (i = data_index; i < field_limit; i++)  {
+				if ((dr_buf[i] == UNIT_TERMINATOR) || (dr_buf[i] == FIELD_TERMINATOR))  {
+					break;
+				}
+			}
+			if (i == field_limit)  {
+				fprintf(stderr, "Ran out of data in DR.\n");
+				exit(0);
+			}
+			subfield->length = i - data_index;
+
+			data_index = i + 1;
+			dr_buf[data_index - 1] = '\0';
+		}
+
+		if ((ddr.user[ddr_index].num_labels > 0) || (ddr.user[ddr_index].num_formats > 0))  {
+			dr_label++;
+			if (dr_label >= ddr.user[ddr_index].num_labels)  {
+				dr_label = 0;
+				/*
+				 * I added the following "if" statement, around the "dr_tag++;"
+				 * statement, because some files don't define a sequence of
+				 * X,Y pairs as cartesian arrays (which are handled below), but
+				 * instead define such pairs as a non-cartesian pair of labels that
+				 * gets re-used until the field_limit is reached.  I am somewhat
+				 * doubtful that this type of construct is standards-conforming, but
+				 * the construct is present in some USGS DEM files, so we attempt to
+				 * handle it whether it is conforming or not.
+				 */
+				if (data_index == field_limit)  {
+					dr_tag++;
+				}
+			}
+		}
+		else if (data_index == field_limit)  {
+			dr_label = 0;
+			dr_tag++;
+		}
+	}
+	else if (ddr.user[ddr_index].field_cntrl[0] == '2')  {
+		/*
+		 * We have an array of subfields.
+		 *
+		 * Here is how I understand arrays, based on the sketchy data
+		 * at hand.  (This understanding may be wrong.)
+		 * The label field, in its most general form (which is called a
+		 * cartesian label), looks like:
+		 *
+		 *  A!B!C*D!E
+		 *
+		 * where the number of labels before and after the '*' may
+		 * differ from the example shown here.
+		 * In front of the cartesian delimiter (the '*') are the
+		 * row labels, and following it are the column labels.
+		 * The data, in the Field Area will fill the A row
+		 * with D and E values, then the B row with D and E
+		 * values, then the C row with D and E values.
+		 * (Actually, the situation is a bit more complicated, since
+		 * the array concept is not limited to two dimensions; but
+		 * let's ignore that complication for this simple routine.)
+		 *
+		 * This most general form is fairly straightforward to
+		 * handle.  (We would probably need to figure out some way to
+		 * return two or more subfield names at a time, but that isn't
+		 * a big deal.  A single subfield string, of the form
+		 * "(ROW_LABEL,COLUMN_LABEL)" would do the job.)
+		 *
+		 * The complications of this structure type arise when either
+		 * the row or column labels are missing.
+		 *
+		 * In order to keep things simple, we are only going to
+		 * support a single case, which is the only case I have
+		 * found so far in the USGS files.  This is the case
+		 * that looks like:
+		 *
+		 *   *ELEVATIONS
+		 *
+		 * or
+		 *
+		 *   *X!Y
+		 *
+		 * Technically, we should probably return a subfield label
+		 * like "(,ELEVATIONS)", "(,X)", or "(,Y)", but we will keep
+		 * it simple and just return "ELEVATIONS", "X", or "Y" for now.
+		 *
+		 * In the DDR parsing routine, we have stored away the locations where
+		 * the '*' delimiter appears.  We don't use this information now, but
+		 * it is available if we eventually need to handle some other cases.
+		 */
+		max_labels_formats = ddr.user[ddr_index].num_labels > ddr.user[ddr_index].num_formats ?
+					ddr.user[ddr_index].num_labels : ddr.user[ddr_index].num_formats;
+
+		subfield->tag = dr.user[dr_tag].tag;
+		subfield->label = ddr.user[ddr_index].labels[dr_label];
+		subfield->value = dr_buf + data_index;
+		subfield->format = ddr.user[ddr_index].formats[dr_label];
+
+		field_limit = dr.record_leader.fa_addr + dr.user[dr_tag].field_pos + dr.user[dr_tag].field_len;
+
+		if (ddr.user[ddr_index].sizes[dr_label] > 0)  {
+			/*
+			 * A size was provided in the format string.  Use it.
+			 * There shouldn't be any UNIT_TERMINATORS between subfields.
+			 */
+			subfield->length = ddr.user[ddr_index].sizes[dr_label];
+			data_index = data_index + subfield->length;
+			if (data_index == (field_limit - 1))  {
+				/* If at end of field, step over FIELD_TERMINATOR */
+				data_index++;
+			}
+		}
+		else  {
+			/*
+			 * No size was provided in the format string.  (Or there was no format string.)
+			 * Must find the end of the subfield via the terminator.
+			 */
+			for (i = data_index; i < field_limit; i++)  {
+				if ((dr_buf[i] == UNIT_TERMINATOR) || (dr_buf[i] == FIELD_TERMINATOR))  {
+					break;
+				}
+			}
+			if (i == field_limit)  {
+				fprintf(stderr, "Ran out of data in DR.\n");
+				exit(0);
+			}
+			subfield->length = i - data_index;
+
+			data_index = i + 1;
+			dr_buf[data_index - 1] = '\0';
+		}
+
+		/*
+		 * It is this little chunk of decision-making code that
+		 * has been simplified to handle only the single
+		 * case we discussed above.
+		 */
+		if (data_index >= (field_limit - 1))  {
+			dr_label = 0;
+			dr_tag++;
+		}
+		else  {
+			dr_label++;
+			if (dr_label >= max_labels_formats)  {
+				dr_label = 0;
+			}
+		}
+	}
+	else  {
+		fprintf(stderr, "Field structure type %c is unknown.\n", ddr.user[ddr_index].field_cntrl[0]);
+		exit(0);
+	}
+
+	return 1;
+
+}
+
+
+
+
+/*
+ * Open a DDF file for processing.
+ */
+int
+begin_ddf(char *file_name)
+{
+	long length;
+
+	leaderless_flag = 0;
+	dr_tag = MAX_TAGS;
+	dr_label = MAX_SUBFIELDS;
+
+	length = strlen(file_name);
+
+	if ((length > 3) && ((strcmp(file_name + length - 3, ".gz") == 0) ||
+	    (strcmp(file_name + length - 3, ".GZ") == 0)))  {
+		gz_flag = 1;
+		read_function = buf_read_z;
+		if ((fdesc = buf_open_z(file_name, O_RDONLY)) < 0)  {
+			return(fdesc);
+		}
+	}
+	else  {
+		gz_flag = 0;
+		read_function = buf_read;
+		if ((fdesc = buf_open(file_name, O_RDONLY)) < 0)  {
+			return(fdesc);
+		}
+	}
+
+	/*
+	 * Read and parse the DDR.
+	 */
+	parse_ddr();
+
+	return fdesc;
+}
+
+
+
+
+/*
+ * Close an open DDF file.
+ */
+void
+end_ddf()
+{
+	if (gz_flag == 0)  {
+		buf_close(fdesc);
+	}
+	else  {
+		buf_close_z(fdesc);
+	}
+
+	if (ddr_buf != (char *)0)  {
+		free(ddr_buf);
+	}
+	if (dr_buf != (char *)0)  {
+		free(dr_buf);
+	}
+}
+
+
+
+
+
+/*
+ * This is a simple program to exercise the above code.
+ * Given a DDF file, the program
+ * simply prints out every bit of data in the file.
+ *
+ * The first argument is the file name to open and parse.
+ * If there is a second argument (and we don't care what it looks
+ * like) the output will be in a compact form.
+ *
+ * To compile this program, do:
+ *
+ * cc -g -o sdts_test sdts_utils.c big_buf_io.c big_buf_io_z.c gunzip.c utilities.c -lm
+ */
+//main(int argc, char *argv[])
+//{
+//	struct subfield subfield;
+//	long compact_flag;
+//	long byte_order;
+//	long i;
+//	short j;
+//	long length;
+//
+//
+//	if ((argc != 2) && (argc != 3))  {
+//		fprintf(stderr, "Usage:  %s file_name.ddf [compact_flag]\n", argv[0]);
+//		exit(0);
+//	}
+//	if (argc == 3)  {
+//		compact_flag = 1;
+//	}
+//	else  {
+//		compact_flag = 0;
+//	}
+//
+//	/* find the native byte-order on this machine. */
+//	byte_order = swab_type();
+//
+//
+//	/* Open the DDF file. */
+//	if (begin_ddf(argv[1]) < 0)  {
+//		fprintf(stderr, "Couldn't open input file.\n");
+//		exit(0);
+//	}
+//
+//	/* print out the DDR for examination */
+////	print_ddr();
+//
+//	/*
+//	 * Read and parse a DR.
+//	 */
+//	while (get_subfield(&subfield) > 0)  {
+//		if (compact_flag == 0)  {
+//			fprintf(stdout, "subfield.tag = %s\n", subfield.tag);
+//			fprintf(stdout, "subfield.label = %s\n", subfield.label);
+//			fprintf(stdout, "subfield.format = %s\n", subfield.format);
+//			fprintf(stdout, "subfield.length = %d\n", subfield.length);
+//			if (strstr(subfield.format, "B") != (char *)0)  {
+//				if (subfield.length == 4)  {
+//					/* Special handling for 4-byte binary values. */
+//					fprintf(stdout, "subfield.value = unswabbed bin: 0x%2.2x%2.2x%2.2x%2.2x\t",
+//						0x000000ff & (long)subfield.value[0],
+//						0x000000ff & (long)subfield.value[1],
+//						0x000000ff & (long)subfield.value[2],
+//						0x000000ff & (long)subfield.value[3]);
+//					i = (((long)subfield.value[3] & 0xff) << 24) |
+//					    (((long)subfield.value[2] & 0xff) << 16) |
+//					    (((long)subfield.value[1] & 0xff) <<  8) |
+//					     ((long)subfield.value[0] & 0xff);
+//					if (byte_order == 1)  {
+//						LE_SWAB(&i);
+//					}
+//					else if (byte_order == 2)  {
+//						PDP_SWAB(&i);
+//					}
+//					fprintf(stdout, "swabbed dec: %10lu (unsigned)\t", i);
+//					fprintf(stdout, "%11ld (signed)\n\n", i);
+//				}
+//				else  {
+//					/* Special handling for 2-byte binary values. */
+//					fprintf(stdout, "subfield.value = unswabbed bin: 0x%2.2x%2.2x\t",
+//						0x000000ff & (long)subfield.value[0],
+//						0x000000ff & (long)subfield.value[1]);
+//					if (byte_order == 0)  {
+//						j = (((long)subfield.value[1] << 8) & 0x0000ff00) + ((long)subfield.value[0] & 0x000000ff);
+//					}
+//					else  {
+//						j = (((long)subfield.value[0] << 8) & 0x0000ff00) + ((long)subfield.value[1] & 0x000000ff);
+//					}
+//					fprintf(stdout, "swabbed dec: %5hu (unsigned)\t", j);
+//					fprintf(stdout, "%6hd (signed)\n\n", j);
+//				}
+//			}
+//			else  {
+//				/* Non-binary subfields can just be printed as ASCII strings. */
+//				fprintf(stdout, "subfield.value = \"%.*s\"\n\n", subfield.length, subfield.value);
+//			}
+//		}
+//		else  {
+//			/*
+//			 * If we are at the beginning of a record, print out an extra newline.
+//			 */
+//			length = strlen(subfield.tag);
+//			for (i = 0; i < (length - 1); i++)  {
+//				if (subfield.tag[i] != '0')  {
+//					break;
+//				}
+//			}
+//			if ((i == (length - 1)) && (subfield.tag[length - 1] >= '0') && (subfield.tag[length - 1] <= '9'))  {
+//				fprintf(stdout, "\n");
+//			}
+//
+//			fprintf(stdout, "%s\t", subfield.tag);
+//			fprintf(stdout, "%s\t", subfield.label);
+//			fprintf(stdout, "%s\t", subfield.format);
+//			fprintf(stdout, "%d\t", subfield.length);
+//			if (strstr(subfield.format, "B") != (char *)0)  {
+//				if (subfield.length == 4)  {
+//					/* Special handling for 4-byte binary values. */
+//					fprintf(stdout, "unswabbed bin: 0x%2.2x%2.2x%2.2x%2.2x\t",
+//						0x000000ff & (long)subfield.value[0],
+//						0x000000ff & (long)subfield.value[1],
+//						0x000000ff & (long)subfield.value[2],
+//						0x000000ff & (long)subfield.value[3]);
+//					i = (((long)subfield.value[3] & 0xff) << 24) |
+//					    (((long)subfield.value[2] & 0xff) << 16) |
+//					    (((long)subfield.value[1] & 0xff) <<  8) |
+//					     ((long)subfield.value[0] & 0xff);
+//					if (byte_order == 1)  {
+//						LE_SWAB(&i);
+//					}
+//					else if (byte_order == 2)  {
+//						PDP_SWAB(&i);
+//					}
+//					fprintf(stdout, "swabbed dec: %10lu (unsigned)\t", i);
+//					fprintf(stdout, "%11ld (signed)\n", i);
+//				}
+//				else  {
+//					/* Special handling for 2-byte binary values. */
+//					fprintf(stdout, "unswabbed bin: 0x%2.2x%2.2x\t",
+//						0x000000ff & (long)subfield.value[0],
+//						0x000000ff & (long)subfield.value[1]);
+//					if (byte_order == 0)  {
+//						j = (((long)subfield.value[1] << 8) & 0x0000ff00) + ((long)subfield.value[0] & 0x000000ff);
+//					}
+//					else  {
+//						j = (((long)subfield.value[0] << 8) & 0x0000ff00) + ((long)subfield.value[1] & 0x000000ff);
+//					}
+//					fprintf(stdout, "swabbed dec: %5hu (unsigned)\t", j);
+//					fprintf(stdout, "%6hd (signed)\n", j);
+//				}
+//			}
+//			else  {
+//				/* Non-binary subfields can just be printed as ASCII strings. */
+//				fprintf(stdout, "\"%.*s\"\n", subfield.length, subfield.value);
+//			}
+//		}
+//	}
+//
+//	end_ddf();
+//
+//	exit(0);
+//}

Added: packages/drawmap/branches/upstream/current/sdts_utils.h
===================================================================
--- packages/drawmap/branches/upstream/current/sdts_utils.h	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/sdts_utils.h	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,39 @@
+/*
+ * =========================================================================
+ * sdts_utils.h - Some global definitions for SDTS files.
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+
+
+/*
+ * This structure holds a subfield retrieved from a DDF file.
+ */
+struct subfield  {
+	char *tag;
+	char *label;
+	char *value;
+	char *format;
+	long length;
+};
+
+
+void print_ddr();
+long get_subfield(struct subfield *);
+int begin_ddf(char *);
+void end_ddf();
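+
+/*
+ * A minimal usage sketch (not part of the drawmap sources proper; the file
+ * name is only a placeholder):  the intended calling sequence is begin_ddf()
+ * once per file, get_subfield() in a loop until it returns zero or less,
+ * and end_ddf() when done.
+ *
+ *	struct subfield sf;
+ *
+ *	if (begin_ddf("file_name.ddf") < 0)  {
+ *		fprintf(stderr, "Couldn't open input file.\n");
+ *		exit(0);
+ *	}
+ *	while (get_subfield(&sf) > 0)  {
+ *		fprintf(stdout, "%s %s \"%.*s\"\n", sf.tag, sf.label, (int)sf.length, sf.value);
+ *	}
+ *	end_ddf();
+ */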

Added: packages/drawmap/branches/upstream/current/unblock_dem.1n
===================================================================
--- packages/drawmap/branches/upstream/current/unblock_dem.1n	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/unblock_dem.1n	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,104 @@
+.TH UNBLOCK_DEM 1 "Jul 24, 2001" \" -*- nroff -*-
+.SH NAME
+unblock_dem \- Insert linefeeds into a Digital Elevation Model (DEM) file
+.SH SYNOPSIS
+.B unblock_dem
+[-L]
+
+.SH DESCRIPTION
+The
+.I drawmap
+program processes USGS Digital Elevation Model (DEM) files
+(along with other types of files) to produce customized maps.
+.I Drawmap
+is able to read these files in their native format.
+However, since the files usually don't contain any linefeeds, they
+are difficult for a human to read.
+.I Unblock_dem
+filters a DEM file and inserts linefeeds for human readability.
+It takes input from the standard input and writes to the standard output.
+The input data must be in uncompressed form, not in gzip-compressed form.
+Also,
+.I unblock_dem
+does not work with SDTS DEM files or GTOPO30 files.
+.PP
+.I Unblock_dem
+normally takes no options, but, if you use the "-L" option,
+the program will print out some license information and exit.
+.PP
+Some DEM files already have embedded linefeeds.
+.I Unblock_dem
+makes a feeble attempt to detect these files and abort.  If the detection fails,
+the results are likely to be garbled, so it is wise to check the format of each file first.
+The
+.I drawmap
+program, with the "-i" option, will attempt to tell you whether a file contains linefeeds or not.
+.PP
+The intended use of
+.I unblock_dem
+is simply to let you conveniently examine the files to see what is in them.
+However, if you are in the mood for adventure, or if you need to repair a
+defective file, you can use
+.I unblock_dem
+to make the file easy to edit, then you can edit the file as desired.
+.I Drawmap
+can still process the files after linefeeds are inserted, but will become
+confused if any line (including the linefeed) is more than 1024 characters
+long.
+.I Drawmap
+may also become confused if you make any significant changes to the
+format of the file.
+.PP
+You may be able to use this capability to repair faulty data.  However, you
+need to be familiar with the content and structure of DEM files in order
+to do this effectively.  You also need to remember that
+.I drawmap
+depends on DEM files having a predictable structure, and it won't
+properly parse files that violate this predictability.
+Do this sort of thing only if you know what you are doing,
+or if you have a lot of free time on your hands.
+.PP
+It should be noted that you can obtain a similar effect by using
+the command recommended by the USGS:
+.PP
+dd if=inputfilename of=outputfilename ibs=4096 cbs=1024 conv=unblock
+.PP
+I prefer the
+.I unblock_dem
+command because it simply replaces the last byte of each record with
+a linefeed.  This lets me easily tell how long the original records were.
+The effect of
+.I unblock_dem
+or
+.I dd
+can be undone by typing:
+.PP
+dd if=inputfilename of=outputfilename ibs=4096 cbs=1024 conv=block
+.PP
+You may come across files that have a linefeed in the 1025th byte.
+Drawmap doesn't like these, both because it can't tell (within the space
+of the first 1024-byte record) whether the file contains linefeeds
+or not, and because the records exceed the maximum record length of 1024 bytes.
+You may be able to repair such files by blocking them with the above
+.I dd
+command.
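+.PP
+As a concrete illustration (the file names below are only placeholders),
+a typical run looks like this:
+.PP
+unblock_dem < input.dem > readable.dem
+.PP
+after which readable.dem can be examined with an ordinary text editor or pager.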
+.SH SEE ALSO
+.I unblock_dlg(1), drawmap(1)
+\" =========================================================================
+\" unblock_dem.1 - The manual page for the unblock_dem program.
+\" Copyright (c) 2000,2001  Fred M. Erickson
+\"
+\" This program is free software; you can redistribute it and/or modify
+\" it under the terms of the GNU General Public License as published by
+\" the Free Software Foundation; either version 2, or (at your option)
+\" any later version.
+\"
+\" This program is distributed in the hope that it will be useful,
+\" but WITHOUT ANY WARRANTY; without even the implied warranty of
+\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+\" GNU General Public License for more details.
+\"
+\" You should have received a copy of the GNU General Public License
+\" along with this program; if not, write to the Free Software
+\" Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+\" =========================================================================

Added: packages/drawmap/branches/upstream/current/unblock_dem.c
===================================================================
--- packages/drawmap/branches/upstream/current/unblock_dem.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/unblock_dem.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,99 @@
+/*
+ * =========================================================================
+ * unblock_dem - A program to add newlines to a USGS DEM file.
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ * This program unblocks the information in a DEM file
+ * by adding a newline to each record.
+ * It sets the last byte in each record (which should always be
+ * a blank) to a newline.
+ *
+ * The program reads from stdin and writes to stdout.
+ */
+#include <stdio.h>
+#include <stdlib.h>	/* for exit() */
+#include <unistd.h>	/* for read() and write() */
+#include <sys/types.h>
+#include "drawmap.h"
+#include "dem.h"
+
+void
+license(void)
+{
+	fprintf(stderr, "This program is free software; you can redistribute it and/or modify\n");
+	fprintf(stderr, "it under the terms of the GNU General Public License as published by\n");
+	fprintf(stderr, "the Free Software Foundation; either version 2, or (at your option)\n");
+	fprintf(stderr, "any later version.\n\n");
+
+	fprintf(stderr, "This program is distributed in the hope that it will be useful,\n");
+	fprintf(stderr, "but WITHOUT ANY WARRANTY; without even the implied warranty of\n");
+	fprintf(stderr, "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n");
+	fprintf(stderr, "GNU General Public License for more details.\n\n");
+
+	fprintf(stderr, "You should have received a copy of the GNU General Public License\n");
+	fprintf(stderr, "along with this program; if not, write to the Free Software\n");
+	fprintf(stderr, "Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n");
+}
+
+main(int argc, char *argv[])
+{
+	unsigned char buf[DEM_RECORD_LENGTH];
+	int start_flag = 0;
+	int ret_val;
+	int i;
+
+	if ((argc == 2) && (argv[1][0] == '-') && (argv[1][1] == 'L'))  {
+		license();
+		exit(0);
+	}
+	else if (argc != 1)  {
+		fprintf(stderr, "Usage:  %s < dem_file\n", argv[0]);
+		exit(0);
+	}
+
+	while ((ret_val = read(0, buf, DEM_RECORD_LENGTH)) == DEM_RECORD_LENGTH)  {
+		if (start_flag == 0)  {
+			/*
+			 * Check for newlines in the first read
+			 * to try to prevent people from converting files
+			 * that already have newlines in them.
+			 */
+			for (i = 0; i < DEM_RECORD_LENGTH; i++)  {
+				if (buf[i] == '\n')  {
+					fprintf(stderr, "This file already has newlines in it.  Aborting.\n");
+					exit(0);
+				}
+			}
+			start_flag = 1;
+		}
+		if (buf[DEM_RECORD_LENGTH - 1] != ' ')  {
+			/*
+			 * In my experience, no optional-format DEM file has non-white-space
+			 * in the last byte of a record.  Don't do the conversion if there
+			 * is actual data there.
+			 */
+			fprintf(stderr, "This file may have formatting problems.  Aborting.\n");
+			exit(0);
+		}
+		buf[DEM_RECORD_LENGTH - 1] = '\n';
+		write(1, buf, ret_val);
+	}
+
+	if ((ret_val != 0) && (ret_val != DEM_RECORD_LENGTH))  {
+		fprintf(stderr, "read() returned %d\n", ret_val);
+		exit(0);
+	}
+}

Added: packages/drawmap/branches/upstream/current/unblock_dlg.1n
===================================================================
--- packages/drawmap/branches/upstream/current/unblock_dlg.1n	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/unblock_dlg.1n	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,106 @@
+.TH UNBLOCK_DLG 1 "Jul 24, 2001" \" -*- nroff -*-
+.SH NAME
+unblock_dlg \- Insert linefeeds into an optional-format Digital Line Graph (DLG) file
+.SH SYNOPSIS
+.B unblock_dlg
+[-L]
+
+.SH DESCRIPTION
+The
+.I drawmap
+program processes USGS optional-format Digital Line Graph (DLG) files
+(along with other types of files) to produce customized maps.
+.I Drawmap
+is able to read these files in their native format.
+However, since the files usually don't contain any linefeeds, they
+are difficult for a human to read.
+.I Unblock_dlg
+filters a DLG file and inserts linefeeds for human readability.
+It takes input from the standard input and writes to the standard output.
+The input data must be in uncompressed form, not in gzip-compressed form.
+Also,
+.I unblock_dlg
+does not work with SDTS DLG files.
+.PP
+.I Unblock_dlg
+normally takes no options, but, if you use the "-L" option,
+the program will print out some license information and exit.
+.PP
+Some DLG files already have embedded linefeeds.
+.I Unblock_dlg
+makes a feeble attempt to detect these files and abort.  If the detection fails,
+the results are likely to be garbled, so it is wise to check the format of each file first.
+The
+.I drawmap
+program, with the "-i" option, will attempt to tell you whether a file contains linefeeds or not.
+.PP
+The intended use of
+.I unblock_dlg
+is simply to let you conveniently examine the files to see what is in them.
+However, if you are in the mood for adventure, or if you need to repair a
+defective file, you can use
+.I unblock_dlg
+to make the file easy to edit, then you can edit the file as desired.
+.I Drawmap
+can still process the files after linefeeds are inserted, but will become
+confused if any line (including the linefeed) is more than 80 characters
+long.
+.I Drawmap
+may also become confused if you make any significant changes to the
+format of the file.
+.PP
+You can use this capability to remove selected
+items from DLG files, when those items cause problems.  You may even
+be able to repair faulty data.  However, you
+need to be familiar with the content and structure of DLG files in order
+to do this effectively.  You also need to remember that
+.I drawmap
+depends on DLG files having a predictable structure, and it won't
+properly parse files that violate this predictability.
+Do this sort of thing only if you know what you are doing,
+or if you have a lot of free time on your hands.
+.PP
+It should be noted that you can obtain a similar effect by using
+the command recommended by the USGS:
+.PP
+dd if=inputfilename of=outputfilename ibs=8000 cbs=80 conv=unblock
+.PP
+I prefer the
+.I unblock_dlg
+command because it simply replaces the last byte of each record with
+a linefeed.  This lets me easily tell how long the original records were.
+The effect of
+.I unblock_dlg
+or
+.I dd
+can be undone by typing:
+.PP
+dd if=inputfilename of=outputfilename ibs=8000 cbs=80 conv=block
+.PP
+You may come across files that have a linefeed in the 81st byte.
+Drawmap doesn't like these, both because it can't tell (within the space
+of the first 80-byte record) whether the file contains linefeeds
+or not, and because the records exceed the maximum record length of 80 bytes.
+You may be able to repair such files by blocking them with the above
+.I dd
+command.
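+.PP
+For illustration (the file names below are only placeholders), a typical
+run looks like this:
+.PP
+unblock_dlg < input.opt > readable.opt
+.PP
+after which readable.opt can be browsed in an ordinary text editor or pager.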
+.SH SEE ALSO
+.I unblock_dem(1), drawmap(1)
+\" =========================================================================
+\" unblock_dlg.1 - The manual page for the unblock_dlg program.
+\" Copyright (c) 2000,2001  Fred M. Erickson
+\"
+\" This program is free software; you can redistribute it and/or modify
+\" it under the terms of the GNU General Public License as published by
+\" the Free Software Foundation; either version 2, or (at your option)
+\" any later version.
+\"
+\" This program is distributed in the hope that it will be useful,
+\" but WITHOUT ANY WARRANTY; without even the implied warranty of
+\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+\" GNU General Public License for more details.
+\"
+\" You should have received a copy of the GNU General Public License
+\" along with this program; if not, write to the Free Software
+\" Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+\" =========================================================================

Added: packages/drawmap/branches/upstream/current/unblock_dlg.c
===================================================================
--- packages/drawmap/branches/upstream/current/unblock_dlg.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/unblock_dlg.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,103 @@
+/*
+ * =========================================================================
+ * unblock_dlg - A program to add newlines to an optional-format USGS DLG file.
+ * Copyright (c) 1997,2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ * This program unblocks the information in an optional-format DLG file
+ * by adding a newline to each record.
+ * It sets the last byte in each record (which should always be
+ * a blank) to a newline.
+ *
+ * The program reads from stdin and writes to stdout.
+ */
+#include <stdio.h>
+#include <stdlib.h>	/* for exit() */
+#include <unistd.h>	/* for read() and write() */
+#include <sys/types.h>
+#include "drawmap.h"
+#include "dlg.h"
+
+void
+license(void)
+{
+	fprintf(stderr, "This program is free software; you can redistribute it and/or modify\n");
+	fprintf(stderr, "it under the terms of the GNU General Public License as published by\n");
+	fprintf(stderr, "the Free Software Foundation; either version 2, or (at your option)\n");
+	fprintf(stderr, "any later version.\n\n");
+
+	fprintf(stderr, "This program is distributed in the hope that it will be useful,\n");
+	fprintf(stderr, "but WITHOUT ANY WARRANTY; without even the implied warranty of\n");
+	fprintf(stderr, "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n");
+	fprintf(stderr, "GNU General Public License for more details.\n\n");
+
+	fprintf(stderr, "You should have received a copy of the GNU General Public License\n");
+	fprintf(stderr, "along with this program; if not, write to the Free Software\n");
+	fprintf(stderr, "Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n");
+}
+
+main(int argc, char *argv[])
+{
+	unsigned char buf[DLG_RECORD_LENGTH];
+	int start_flag = 0;
+	int ret_val;
+	int i;
+
+	if ((argc == 2) && (argv[1][0] == '-') && (argv[1][1] == 'L'))  {
+		license();
+		exit(0);
+	}
+	else if (argc != 1)  {
+		fprintf(stderr, "Usage:  %s < optional_format_dlg_file.opt\n", argv[0]);
+		exit(0);
+	}
+
+	while ((ret_val = read(0, buf, DLG_RECORD_LENGTH)) == DLG_RECORD_LENGTH)  {
+		if (start_flag == 0)  {
+			/*
+			 * Check for newlines in the first read
+			 * to try to prevent people from converting files
+			 * that already have newlines in them.
+			 */
+			for (i = 0; i < DLG_RECORD_LENGTH; i++)  {
+				if (buf[i] == '\n')  {
+					fprintf(stderr, "This file already has newlines in it.  Aborting.\n");
+					exit(0);
+				}
+			}
+			start_flag = 1;
+		}
+//		if (buf[DLG_RECORD_LENGTH - 1] != ' ')  {
+//			/*
+//			 * According to the standard, bytes 73-80 of each record are
+//			 * either blank (which seems to usually be the case) or can
+//			 * contain a record sequence number.  This if-block assumes
+//			 * that there is always a blank in byte 80, and checks for
+//			 * record sanity based on that assumption.  Of course, the
+//			 * check will erroneously fail if there is a record sequence
+//			 * number.
+//			 */
+//			fprintf(stderr, "This file may have formatting problems.  Aborting.\n");
+//			exit(0);
+//		}
+		buf[DLG_RECORD_LENGTH - 1] = '\n';
+		write(1, buf, ret_val);
+	}
+
+	if ((ret_val != 0) && (ret_val != DLG_RECORD_LENGTH))  {
+		fprintf(stderr, "read() returned %d\n", ret_val);
+		exit(0);
+	}
+}

Added: packages/drawmap/branches/upstream/current/utilities.c
===================================================================
--- packages/drawmap/branches/upstream/current/utilities.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/utilities.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,694 @@
+/*
+ * =========================================================================
+ * utilities - A library of utility functions used by the drawmap program.
+ * Copyright (c) 1997  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ */
+
+#include <math.h>
+#include <sys/types.h>
+#include "drawmap.h"
+
+
+/*
+ * Convert latitudes in degree/min/sec format into decimal degrees.
+ *
+ * We assume that there is no decimal point, or other punctuation,
+ * and that the numeric latitude is in the DDMMSS format,
+ * and that the latitude is immediately followed by 'N' or 'S'.
+ */
+double lat_conv(unsigned char *ptr)
+{
+	double lat;
+	double min;
+	double sec;
+
+	lat = *ptr - '0';
+	lat = lat * 10.0 + *(ptr + 1) - '0';
+	min = *(ptr + 2) - '0';
+	min = min * 10.0 + *(ptr + 3) - '0';
+	sec = *(ptr + 4) - '0';
+	sec = sec * 10.0 + *(ptr + 5) - '0';
+	lat = lat + min / 60.0 + sec / 3600.0;
+	if (*(ptr + 6) == 'S')  {
+		lat = -lat;
+	}
+	return(lat);
+}
+
+
+/*
+ * Convert longitudes in degree/min/sec format into decimal degrees.
+ *
+ * We assume that there is no decimal point, or other punctuation,
+ * and that the numeric longitude is in the DDDMMSS format,
+ * and that the longitude is immediately followed by 'W' or 'E'.
+ */
+double lon_conv(unsigned char *ptr)
+{
+	double lon;
+	double min;
+	double sec;
+
+	lon = *ptr - '0';
+	lon = lon * 10.0 + *(ptr + 1) - '0';
+	lon = lon * 10.0 + *(ptr + 2) - '0';
+	min = *(ptr + 3) - '0';
+	min = min * 10.0 + *(ptr + 4) - '0';
+	sec = *(ptr + 5) - '0';
+	sec = sec * 10.0 + *(ptr + 6) - '0';
+	lon = lon + min / 60.0 + sec / 3600.0;
+	if (*(ptr + 7) == 'W')  {
+		lon = -lon;
+	}
+	return(lon);
+}
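+
+/*
+ * For illustration, assuming input strings in the fixed DDMMSS[N|S] and
+ * DDDMMSS[W|E] layouts described above, the conversions work out as
+ * follows (the values are worked by hand, not taken from real data):
+ *
+ *	lat_conv((unsigned char *)"453015N")  ==   45 + 30/60 + 15/3600   =   45.5041666...
+ *	lon_conv((unsigned char *)"1112230W") == -(111 + 22/60 + 30/3600) = -111.375
+ */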
+
+
+
+
+/* Round double values to long integers. */
+long
+round(double f)
+{
+	long i;
+	double ff;
+
+	i = (long)f;
+	ff = (double)i;
+
+	if (f < 0.0)  {
+		if ((ff - f) >= 0.5)  {
+			return(i - 1);
+		}
+	}
+	else  {
+		if ((f - ff) >= 0.5)  {
+			return(i + 1);
+		}
+	}
+
+	return(i);
+}
+
+
+
+/* Find the maximum of two long integers. */
+long
+max(long a, long b)
+{
+	if (a > b)  {
+		return(a);
+	}
+	else  {
+		return(b);
+	}
+}
+
+
+
+/* Find the minimum of three doubles */
+double
+min3(double a, double b, double c)
+{
+	if (a < b)  {
+		if (a < c)  {
+			return(a);
+		}
+		else  {
+			return(c);
+		}
+	}
+	else  {
+		if (b < c)  {
+			return(b);
+		}
+		else  {
+			return(c);
+		}
+	}
+}
+
+
+
+/* Find the maximum of three doubles */
+double
+max3(double a, double b, double c)
+{
+	if (a > b)  {
+		if (a > c)  {
+			return(a);
+		}
+		else  {
+			return(c);
+		}
+	}
+	else  {
+		if (b > c)  {
+			return(b);
+		}
+		else  {
+			return(c);
+		}
+	}
+}
+
+
+
+
+/*
+ * Convert decimal degrees into degrees, minutes, seconds.
+ */
+void
+decimal_degrees_to_dms(double decimal, long *d, long *m, double *s)
+{
+	long sign;
+
+	if (decimal < 0.0)  {
+		sign = -1;
+		decimal = -decimal;
+	}
+	else  {
+		sign = 1;
+	}
+
+	*d = (long)decimal;
+	*m = (long)((decimal - (double)*d) * 60.0);
+	*s = (decimal - (double)*d - (double)*m / 60.) * 3600.00;
+	*d = sign * *d;
+}
+
+
+
+
+struct utm_zones  {
+	long	zone;
+	double	central_meridian;
+	double	low_boundary;
+	double	high_boundary;
+} utm_zones[61] =  {
+	 0,	   0.0,	   0.0,	   0.0,	// Dummy entry so that the zone indices are correct
+	 1,	-177.0,	-180.0,	-174.0,
+	 2,	-171.0,	-174.0,	-168.0,
+	 3,	-165.0,	-168.0,	-162.0,
+	 4,	-159.0,	-162.0,	-156.0,
+	 5,	-153.0,	-156.0,	-150.0,
+	 6,	-147.0,	-150.0,	-144.0,
+	 7,	-141.0,	-144.0,	-138.0,
+	 8,	-135.0,	-138.0,	-132.0,
+	 9,	-129.0,	-132.0,	-126.0,
+	10,	-123.0,	-126.0,	-120.0,
+	11,	-117.0,	-120.0,	-114.0,
+	12,	-111.0,	-114.0,	-108.0,
+	13,	-105.0,	-108.0,	-102.0,
+	14,	- 99.0,	-102.0,	- 96.0,
+	15,	- 93.0,	-096.0,	- 90.0,
+	16,	- 87.0,	-090.0,	- 84.0,
+	17,	- 81.0,	-084.0,	- 78.0,
+	18,	- 75.0,	-078.0,	- 72.0,
+	19,	- 69.0,	-072.0,	- 66.0,
+	20,	- 63.0,	-066.0,	- 60.0,
+	21,	- 57.0,	-060.0,	- 54.0,
+	22,	- 51.0,	-054.0,	- 48.0,
+	23,	- 45.0,	-048.0,	- 42.0,
+	24,	- 39.0,	-042.0,	- 36.0,
+	25,	- 33.0,	-036.0,	- 30.0,
+	26,	- 27.0,	-030.0,	- 24.0,
+	27,	- 21.0,	-024.0,	- 18.0,
+	28,	- 15.0,	-018.0,	- 12.0,
+	29,	-  9.0,	-012.0,	-  6.0,
+	30,	-  3.0,	-006.0,	   0.0,
+	31,	   3.0,	 000.0,	   6.0,
+	32,	   9.0,	 006.0,	  12.0,
+	33,	  15.0,	 012.0,	  18.0,
+	34,	  21.0,	 018.0,	  24.0,
+	35,	  27.0,	 024.0,	  30.0,
+	36,	  33.0,	 030.0,	  36.0,
+	37,	  39.0,	 036.0,	  42.0,
+	38,	  45.0,	 042.0,	  48.0,
+	39,	  51.0,	 048.0,	  54.0,
+	40,	  57.0,	 054.0,	  60.0,
+	41,	  63.0,	 060.0,	  66.0,
+	42,	  69.0,	 066.0,	  72.0,
+	43,	  75.0,	 072.0,	  78.0,
+	44,	  81.0,	 078.0,	  84.0,
+	45,	  87.0,	 084.0,	  90.0,
+	46,	  93.0,	 090.0,	  96.0,
+	47,	  99.0,	 096.0,	 102.0,
+	48,	 105.0,	 102.0,	 108.0,
+	49,	 111.0,	 108.0,	 114.0,
+	50,	 117.0,	 114.0,	 120.0,
+	51,	 123.0,	 120.0,	 126.0,
+	52,	 129.0,	 126.0,	 132.0,
+	53,	 135.0,	 132.0,	 138.0,
+	54,	 141.0,	 138.0,	 144.0,
+	55,	 147.0,	 144.0,	 150.0,
+	56,	 153.0,	 150.0,	 156.0,
+	57,	 159.0,	 156.0,	 162.0,
+	58,	 165.0,	 162.0,	 168.0,
+	59,	 171.0,	 168.0,	 174.0,
+	60,	 177.0,	 174.0,	 180.0,
+};
+
+/*
+ * The following two functions use Redfearn's formulas to calculate the
+ * forward and inverse projections between UTM coordinates and geographic
+ * (latitude/longitude) coordinates.
+ *
+ * Given some parameters for the selected ellipsoid, Redfearn's formulas
+ * allow one to translate back and forth between UTM
+ * coordinates and latitude/longitude coordinates.
+ * Before we examine Redfearn's formulas, here are some preliminary notes.
+ *
+ * These formulas were apparently originally published in 1948:
+ *
+ *	Redfearn, J.C.B., "Transverse Mercator Formulae", Empire Survey Review, 69, 1948, 318-322.
+ *
+ * I was unable to find a copy of this reference for verification, but did
+ * find several other documents that described the formulas.  From them, I
+ * pieced together the formulas here.
+ *
+ * A good reference for projection calculations of all kinds is supposed to be:
+ *      Snyder, John P., Map Projections -- A Working Manual:
+ *      U.S. Geological Survey Professional Paper 1395,
+ *      United States Government Printing Office, Washington D.C., 1987.
+ * Although I haven't personally had a chance to read it, it is frequently
+ * recommended on the Internet.
+ *
+ * A software package for doing projections is the PROJ package.  It is
+ * available on the Internet.  I chose not to use it because my needs are
+ * limited, and it was easier to just write the software I need than integrate
+ * PROJ.  It is, however, a fine package, as far as I can tell.
+ *
+ * As far as I know, most or all of the currently-available 7.5min USGS data assumes
+ * the Clarke 1866 ellipsoid with the North American Datum of 1927 (nad-27).
+ * According to the DEM standards document, they are nad-27 if they have the
+ * old-format Type A record.  The new Type A record contains a field to specify the datum.
+ * I gather that the data will ultimately be re-referenced to the new GRS80
+ * ellipsoid and nad-83.
+ *
+ * The 1-degree DEMs may or may not be in the new WGS 84 datum.  The standards document
+ * says that recomputed data have been made available to the USGS, but doesn't say
+ * if these new data are what is available for download.  We probably don't care,
+ * for the time being, because the 1-degree data come in latitude/longitude format,
+ * and we don't convert them to any other form.
+ *
+ * Parameters for various ellipsoids are given in drawmap.h.
+ *
+ * Now, on to Redfearn's formulas.
+ *
+ *    Note:  In the equations that follow, y is the true northing, utm_y is the northing with
+ *           the false northing (10,000,000m) added in, x is the true easting, utm_x is the
+ *           easting with the false easting (500,000m) added in.
+ *
+ * They begin with the calculation of the length of arc of a meridian (a great
+ * circle passing through the poles).  The formula (in Macsyma format) is:
+ *
+ *     m = a * (1 - e^2) * int((1 - e^2 * sin^2(lat))^(-3/2), lat, lat1, lat2)
+ *
+ * (where the caret sign, '^', represents exponentiation and 'int' represents
+ * a definite integral of the given expression over the variable lat, from lat1 to lat2).
+ *
+ * We are interested in the case where lat1 = 0 (and the integral starts at the equator).
+ *
+ * This integral is normally calculated via a series expansion:
+ *
+ *     m = a * (A0 * lat - A2 * sin(2*lat) + A4 * sin(4*lat) - A6 * sin(6*lat))
+ *
+ *         A0 = 1 - (e^2 / 4) - (3 * e^4 / 64) - (5 * e^6 / 256)
+ *         A2 = (3/8) * (e^2 + e^4 / 4 + 15 e^6 / 128)
+ *         A4 = (15 / 256) * (e^4 + 3 * e^6 / 4)
+ *         A6 = 35 * e^6 / 3072
+ *
+ * With the value of m in hand, we move on to calculate the foot-point latitude, lat', which is
+ * the latitude for which m = y / k0
+ * where y is the true northing (which is just the northing in the northern climes, but is the
+ * nominal northing - 10,000,000 in the southern hemisphere) and k0 is the central-meridian
+ * scale factor.
+ *
+ * The foot-point latitude is found as follows:
+ *
+ *      n = (a - b) / (a + b) = f / (2 - f)
+ *      G = a * (1 - n) * (1 - n^2) * (1 + (9/4) * n^2 + (225/64) * n^4) * (pi / 180)
+ *      sigma = (m * pi) / (180 * G)
+ *      lat' = sigma + ((3*n/2) - (27*n^3/32)) * sin(2*sigma) + ((21*n^2/16) - (55*n^4/32)) * sin(4*sigma) + (151*n^3/96) * sin(6*sigma) + (1097*n^4/512) * sin(8*sigma)        in units of radians
+ *
+ * For the inverse projection, where the latitude is to-be-determined, there may
+ * be some snazzy way to find lat', but I chose to do it iteratively using
+ * Newton's nethod.  In my limited testing, this approach appears to converge
+ * quite rapidly, in about 2 or 3 iterations.  (I have not, however, tested the convergence
+ * rate rigorously.)
+ *
+ * Next, we need the radii of curvature, found from the formulas.
+ *
+ *      rho = a * (1 - e^2) / (1 - e^2 * sin^2(lat))^(3/2)
+ *      nu  = a / (1 - e^2 * sin^2(lat))^(1/2)
+ *      phi = nu / rho
+ *
+ * These are general formulas.  When we evaluate them specifically for the foot-point
+ * latitude, then we prime each of them:
+ *
+ *      rho' = a * (1 - e^2) / (1 - e^2 * sin^2(lat'))^(3/2)
+ *      nu'  = a / (1 - e^2 * sin^2(lat'))^(1/2)
+ *      phi' = nu' / rho'
+ *
+ *
+ * This completes the preliminary calculations.  Now, on to the actual conversions.
+ *
+ *
+ * lat/long to UTM, performed by function redfearn():
+ *    t = tan(lat)
+ *    omega = longitude * pi / 180 - central_meridian
+ *
+ *    utm_x:
+ *    x = k0 * nu * omega * cos(lat) * (1 +
+ *                                       (omega^2 / 6) * cos^2(lat) * (phi - t^2) +
+ *                                       (omega^4 / 120) * cos^4(lat) * (4 * phi^3 * (1 - 6 * t^2) + phi^2 * (1 + 8*t^2) - 2*phi*t^2 + t^4) +
+ *                                       (omega^6 / 5040) * cos^6(lat) * (61 - 479*t^2 + 179*t^4 - t^6))
+ *    utm_x = x + 500,000
+ *
+ *    utm_y:
+ *    y = k0 * (m + (omega^2 / 2) * nu * sin(lat) * cos(lat) +
+ *                   (omega^4 / 24) * nu * sin(lat) * cos^3(lat) * (4 * phi^2 + phi - t^2) +
+ *                   (omega^6 / 720) * nu * sin(lat) * cos^5(lat) * (8 * phi^4 * (11 - 24*t^2) -
+ *                    28 * phi^3 * (1 - 6*t^2) + phi^2 * (1 - 32*t^2) - 2*phi*t^2 + t^4) +
+ *                   (omega^8 / 40320) * nu * sin(lat) * cos^7(lat) * (1385 - 3111*t^2 + 543*t^4 - t^6))
+ *    utm_y = y + 10,000,000
+ *
+ *    grid convergence:
+ *    gamma = -omega * sin(lat) -
+ *            (omega^3 / 3) * sin(lat) * cos^2(lat) * (2*phi^2 - phi) -
+ *            (omega^5 / 15) * sin(lat) * cos^4(lat) * (phi^4 * (11 - 24*t^2) - phi^3 * (11 - 36*t^2) +
+ *             2*phi^2 * (1 - 7*t^2) + phi*t^2) -
+ *            (omega^7 / 315) * sin(lat) * cos^6(lat) * (17 - 26*t^2 + 2*t^4)
+ *
+ *    point scale factor:
+ *    k = k0 * (1 + (omega^2 / 2) * phi * cos^2(lat) +
+ *                  (omega^4 / 24) * cos^4(lat) * (4*phi^3 * (1 - 6*t^2) + phi^2 * (1 + 24*t^2) - 4*phi*t^2) +
+ *                  (omega^6 / 720) * cos^6(lat) * (61 - 148*t^2 + 16*t^4))
+ *
+ *
+ * UTM to lat/long, performed by function redfearn_inverse():
+ *    Note that the ugly construct, phi'^3, represents phi' taken to the third power.  It's
+ *    ugly, but adding enough parentheses to clarify it would arguably be uglier still.
+ *
+ *    x = utm_x - 500,000
+ *    d = x / (k0 * nu')
+ *    t' = tan(lat')
+ *
+ *    latitude:
+ *    lat = lat' - (nu' * t' / rho') * (d^2 / 2) +
+ *                 (nu' * t' / rho') * (d^4 / 24) * (-4*phi'^2 + 9*phi' * (1 - t'^2) + 12*t'^2) -
+ *                 (nu' * t' / rho') * (d^6 / 720) * (8*phi'^4 * (11 - 24*t'^2) - 12*phi'^3 * (21 - 71*t'^2) +
+ *                  15*phi'^2 * (15 - 98*t'^2 + 15*t'^4) + 180*phi' * (5*t'^2 - 3*t'^4) + 360*t'^4) +
+ *                 (nu' * t' / rho') * (d^8 / 40320) * (1385 + 3633*t'^2 + 4095*t'^4 + 1575*t'^6)
+ *
+ *    longitude:
+ *    omega = d * sec(lat') -
+ *            (d^3 / 6) * sec(lat') * (phi' + 2*t'^2) +
+ *            (d^5 / 120) * sec(lat') * (-4*phi'^3 * (1 - 6*t'^2) + phi'^2 * (9 - 68*t'^2) + 72*phi'*t'^2 + 24*t'^4) -
+ *            (d^7 / 5040) * sec(lat') * (61 + 662*t'^2 + 1320*t'^4 + 720*t'^6)
+ *
+ *    long = central_meridian + omega * 180 / pi
+ *
+ *    grid convergence:
+ *    gamma = -t' * d +
+ *            (t' * d^3 / 3) * (-2*phi'^2 + 3*phi' + t'^2) -
+ *            (t' * d^5 / 15) * (phi'^4 * (11 - 24*t'^2) - 3*phi'^3 * (8 - 23*t'^2) +
+ *             5*phi'^2 * (3 - 14*t'^2) + 30*phi'*t'^2 + 3*t'^4) +
+ *            (t' * d^7 / 315) * (17 + 77*t'^2 + 105*t'^4 + 45*t'^6)
+ *
+ *    point scale factor:
+ *    dd = x^2 / (k0^2 * rho' * nu')
+ *    k = k0 * (1 + dd/2 + (dd^2 / 24) * (4*phi' * (1 - 6*t'^2) - 3 * (1 - 16*t'^2) - 24*t'^2 / phi') + dd^3 / 720)
+ *
+ * Now, on to the actual code.
+ *
+ * Note:  redfearn_inverse() returns 0 if the conversion appears to be successful, nonzero otherwise.
+ *
+ * Note further:  This function has not been tuned for efficiency.
+ */
+long
+redfearn_inverse(struct datum *datum, double utm_x, double utm_y, long zone, double *latitude, double *longitude)
+{
+	double x, y;	// UTM coordinates with false easting and northing removed and scale factor applied.
+	double lat_pm;	// foot-point latitude
+	double d;
+	double t_pm;
+	double m, m_pm;
+	double nu_pm;
+	double rho_pm;
+	double phi_pm;
+	double slat, slat_2, clat, d_2, d_3, d_4, d_5, d_6, d_7, d_8;
+	double t_pm_2, t_pm_4, t_pm_6, phi_pm_2, phi_pm_3, phi_pm_4;
+	long i;
+
+	x = (utm_x - 500000.0) / datum->k0;
+
+	if ((zone > 60) || (zone == 0) || (zone < -60)) {
+		return -1;
+	}
+	if (zone < 0)  {
+		/* southern hemisphere */
+		zone = -zone;
+		y = (utm_y - 10000000.0) / datum->k0;
+		lat_pm = -M_PI / 4.0;
+	}
+	else  {
+		y = utm_y / datum->k0;
+		lat_pm = M_PI / 4.0;
+	}
+
+	/*
+	 * Find lat_pm, via iterative Newton's method.
+	 * The goal is to find lat_pm, such that m == y, or equivalently
+	 * to find a root of m-y.
+	 */
+	for (i = 0; i < 100; i++)  {
+		m = datum->a * (datum->a0 * lat_pm - datum->a2 * sin(2.0 * lat_pm) + datum->a4 * sin(4.0 * lat_pm) - datum->a6 * sin(6.0 * lat_pm)) - y;
+		m_pm = datum->a * (datum->a0 - datum->a2 * 2.0 * cos(2.0 * lat_pm) + datum->a4 * 4.0 * cos(4.0 * lat_pm) - datum->a6 * 6.0 * cos(6.0 * lat_pm));
+		if (fabs(m / m_pm) < 1.0e-12)  {
+			break;
+		}
+		lat_pm -= m / m_pm;
+	}
+
+	slat = sin(lat_pm);
+	slat_2 = slat * slat;
+	clat = sqrt(1.0 - slat_2);
+	t_pm = slat / clat;
+
+	nu_pm = datum->a / sqrt(1.0 - datum->e_2 * slat_2);
+	rho_pm = datum->a * (1.0 - datum->e_2) / pow(1.0 - datum->e_2 * slat_2, 1.5);
+	phi_pm = nu_pm / rho_pm;
+	d = x / nu_pm;
+
+	d_2 = d * d;
+	d_3 = d_2 * d;
+	d_4 = d_3 * d;
+	d_5 = d_4 * d;
+	d_6 = d_5 * d;
+	d_7 = d_6 * d;
+	d_8 = d_7 * d;
+	t_pm_2 = t_pm * t_pm;
+	t_pm_4 = t_pm_2 * t_pm_2;
+	t_pm_6 = t_pm_2 * t_pm_4;
+	phi_pm_2 = phi_pm * phi_pm;
+	phi_pm_3 = phi_pm_2 * phi_pm;
+	phi_pm_4 = phi_pm_3 * phi_pm;
+
+	*latitude = lat_pm - (nu_pm * t_pm / rho_pm) * ((d_2 / 2.0) -
+			 (d_4 / 24.0) * (-4.0 * phi_pm_2 + 9.0 * phi_pm * (1.0 - t_pm_2) + 12.0 * t_pm_2) +
+			 (d_6 / 720.0) * (8.0 * phi_pm_4 * (11.0 - 24.0 * t_pm_2) - 12.0 * phi_pm_3 *
+				(21.0 - 71.0 * t_pm_2) + 15.0 * phi_pm_2 * (15.0 - 98.0 * t_pm_2 + 15.0 * t_pm_4) +
+				180.0 * phi_pm * (5.0 * t_pm_2 - 3.0 * t_pm_4) + 360.0 * t_pm_4) -
+			 (d_8 / 40320.0) * (1385.0 + 3633.0 * t_pm_2 + 4095.0*t_pm_4 + 1575.0*t_pm_6));
+	*longitude = d / clat - (d_3 / 6.0) * (phi_pm + 2.0 * t_pm_2) / clat +
+		     (d_5 / 120.0) * (-4.0 * phi_pm_3 * (1.0 - 6.0 * t_pm_2) + phi_pm_2 * (9.0 - 68.0 * t_pm_2) +
+			72.0 * phi_pm * t_pm_2 + 24.0 * t_pm_4) / clat -
+		     (d_7 / 5040.0) * (61.0 + 662.0 * t_pm_2 + 1320.0 * t_pm_4 + 720.0 * t_pm_6) / clat;
+	*latitude = *latitude * 180.0 / M_PI;
+	*longitude = utm_zones[zone].central_meridian + *longitude * 180.0 / M_PI;
+
+	return 0;
+}
+
+/*
+ * Note:  redfearn() returns 0 if the conversion appears to be successful, nonzero otherwise.
+ * Note further:  latitudes are negative south of the equator.  longitudes are negative west of the prime meridian.
+ * Note further:  redfearn() returns a negative zone number for points in the southern hemisphere
+ * Note further:  This function has been only partially tuned for efficiency.
+ */
+long
+redfearn(struct datum *datum, double *utm_x, double *utm_y, long *zone, double latitude, double longitude, long east_west)
+{
+	double x, y;	// UTM coordinates with false easting and northing removed and scale factor applied.
+	double d;
+	double t;
+	double m;
+	double nu;
+	double rho;
+	double phi;
+	double o;
+	double t_2, t_4, t_6, o_2, o_3, o_4, o_5, o_6, o_7, o_8;
+	double slat, slat_2, clat, clat_2, clat_3, clat_4, clat_5, clat_6, clat_7;
+	double phi_2, phi_3, phi_4;
+	long i;
+	long south_flag = 0;	// If this flag is nonzero, the point is in the southern hemisphere.
+
+
+	/*
+	 * Note:  Originally the following check was
+	 *
+	 *    if ((latitude > 84.0) || (latitude < -80.0)) {
+	 *
+	 * because that is the valid range for UTM projections.
+	 * However, in order to allow projections to be done for the
+	 * GTOPO30 data, I changed the range to the full -90 to 90 span.
+	 */
+	if ((latitude > 90.0) || (latitude < -90.0)) {
+		return -1;
+	}
+	if ((longitude > 180.0) || (longitude < -180.0)) {
+		return -1;
+	}
+
+	/*
+	 * Determine the zone.
+	 *
+	 * If the point falls on the boundary between zones,
+	 * then use the parameter east_west to choose between zones.
+	 * If east_west is nonzero, then choose the eastern zone.
+	 * If east_west is 0, then choose the western zone.
+	 * If we are on the boundary between zone 1 and zone 60, then ignore
+	 * east_west and choose the zone based on the passed longitude.
+	 */
+	if (longitude == utm_zones[1].low_boundary)  {
+		*zone = 1;
+	}
+	else if (longitude == utm_zones[60].high_boundary)  {
+			*zone = 60;
+	}
+	else  {
+		for (i = 1; i <= 60; i++)  {
+			if (longitude == utm_zones[i].high_boundary)  {
+				if (east_west == 0)  {
+					*zone = i;
+				}
+				else  {
+					*zone = i + 1;
+				}
+				break;
+			}
+			else if ((longitude > utm_zones[i].low_boundary) && (longitude < utm_zones[i].high_boundary))  {
+				*zone = i;
+				break;
+			}
+		}
+	}
+
+	o = (longitude - utm_zones[*zone].central_meridian) * M_PI / 180.0;
+
+	latitude *= M_PI / 180.0;
+	longitude *= M_PI / 180.0;
+	slat = sin(latitude);
+	slat_2 = slat * slat;
+	clat = sqrt(1.0 - slat_2);	// cos(latitude)
+	t = slat / clat;		// tan(latitude)
+
+	t_2 = t * t;
+	t_4 = t_2 * t_2;
+	t_6 = t_2 * t_4;
+	o_2 = o * o;
+	o_3 = o_2 * o;
+	o_4 = o_2 * o_2;
+	o_5 = o_4 * o;
+	o_6 = o_4 * o_2;
+	o_7 = o_6 * o;
+	o_8 = o_4 * o_4;
+	clat_2 = clat * clat;
+	clat_3 = clat_2 * clat;
+	clat_4 = clat_2 * clat_2;
+	clat_5 = clat_4 * clat;
+	clat_6 = clat_4 * clat_2;
+	clat_7 = clat_6 * clat;
+
+	m = datum->a * (datum->a0 * latitude - datum->a2 * sin(2.0 * latitude) + datum->a4 * sin(4.0 * latitude) - datum->a6 * sin(6.0 * latitude));
+
+	nu = datum->a / sqrt(1.0 - datum->e_2 * slat_2);
+	rho = datum->a * (1.0 - datum->e_2) / pow(1.0 - datum->e_2 * slat_2, 1.5);
+	phi = nu / rho;
+
+	phi_2 = phi * phi;
+	phi_3 = phi_2 * phi;
+	phi_4 = phi_2 * phi_2;
+
+	*utm_x = 500000.0 + datum->k0 * nu * clat * (o + (o_3 / 6.0) * clat_2 * (phi - t_2) +
+			    (o_5 / 120.0) * clat_4 * (4.0 * phi_3 * (1.0 - 6.0 * t_2) +
+				phi_2 * (1.0 + 8.0 * t_2) - 2.0 * phi * t_2 + t_4) +
+			    (o_7 / 5040.0) * clat_6 * (61.0 - 479.0 * t_2 + 179.0 * t_4 - t_6));
+	*utm_y = datum->k0 * (m + (o_2 / 2.0) * nu * slat * clat +
+			   (o_4 / 24.0) * nu * slat * clat_3 * (4.0 * phi_2 + phi - t_2) +
+			   (o_6 / 720.0) * nu * slat * clat_5 * (8.0 * phi_4 * (11.0 - 24.0 * t_2) -
+				28.0 * phi_3 * (1.0 - 6.0 * t_2) + phi_2 * (1.0 - 32.0 * t_2) - 2.0 * phi * t_2 + t_4) +
+			   (o_8 / 40320.0) * nu * slat * clat_7 * (1385.0 - 3111.0*t_2 + 543.0*t_4 - t_6));
+
+	if (latitude < 0)  {
+		/* In the southern hemisphere, we return a negative zone number. */
+		*zone = -*zone;
+		*utm_y += 10000000.0;
+	}
+
+	return 0;
+}
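+
+/*
+ * A minimal sketch (not part of the original utilities) of how the two
+ * projection routines above are meant to be paired.  The datum setup
+ * mirrors the NAD-27 case in utm2ll.c; the coordinates are illustrative only.
+ *
+ *	struct datum datum;
+ *	double utm_x, utm_y, lat, lon;
+ *	long zone;
+ *
+ *	datum.a = NAD27_SEMIMAJOR;    datum.b = NAD27_SEMIMINOR;
+ *	datum.e_2 = NAD27_E_SQUARED;  datum.f_inv = NAD27_F_INV;
+ *	datum.k0 = UTM_K0;
+ *	datum.a0 = NAD27_A0;  datum.a2 = NAD27_A2;
+ *	datum.a4 = NAD27_A4;  datum.a6 = NAD27_A6;
+ *
+ *	if (redfearn(&datum, &utm_x, &utm_y, &zone, 45.5, -111.0, 0) == 0)  {
+ *		redfearn_inverse(&datum, utm_x, utm_y, zone, &lat, &lon);
+ *	}
+ *
+ * After the round trip, lat and lon should be very close to 45.5 and -111.0.
+ */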
+
+
+
+
+
+/*
+ * Check the type of swabbing needed on this machine.
+ */
+long swab_type()
+{
+	union swabtest {
+		unsigned long l;
+		unsigned char c[4];
+	} swabtest;
+
+	swabtest.l = 0xaabbccdd;
+
+	if ((swabtest.c[0] == 0xaa) && (swabtest.c[1] == 0xbb) &&
+	    (swabtest.c[2] == 0xcc) && (swabtest.c[3] == 0xdd))  {
+		/* BIG_ENDIAN: Do nothing */
+		return 0;
+	}
+	else if ((swabtest.c[0] == 0xdd) && (swabtest.c[1] == 0xcc) &&
+	    (swabtest.c[2] == 0xbb) && (swabtest.c[3] == 0xaa))  {
+		/* LITTLE_ENDIAN */
+		return 1;
+	}
+	else if ((swabtest.c[0] == 0xbb) && (swabtest.c[1] == 0xaa) &&
+	    (swabtest.c[2] == 0xdd) && (swabtest.c[3] == 0xcc))  {
+		/* PDP_ENDIAN */
+		return 2;
+	}
+	else  {
+		/* Unknown */
+		return -1;
+	}
+}

Added: packages/drawmap/branches/upstream/current/utm2ll.1n
===================================================================
--- packages/drawmap/branches/upstream/current/utm2ll.1n	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/utm2ll.1n	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,75 @@
+.TH UTM2LL 1 "Jul 24, 2001" \" -*- nroff -*-
+.SH NAME
+utm2ll \- Convert UTM coordinates to latitude/longitude geographical coordinates
+.SH SYNOPSIS
+.B utm2ll
+[-L] | [utm_x utm_y zone [nad27 | nad83 | wgs84]]
+
+.SH DESCRIPTION
+This program uses Redfearn's formulas to convert a given set of Universal
+Transverse Mercator (UTM) coordinates into the equivalent latitude and longitude
+geographical coordinates.  (This operation is often referred to
+as inverse projection, since it projects a previously-projected flat surface
+back onto the curved surface from whence it originally came.)
+The inputs are the UTM "x" (also known as easting) value,
+the UTM "y" (also known as northing) value, and the UTM zone.
+The "x" value includes the normal 500,000 false easting.
+The "y" value includes the normal 10,000,000 false northing,
+if the point is in the southern hemisphere.
+For points in the southern hemisphere, make the zone number negative.
+.PP
+Warning:  Not all of the possible triples of utm_x, utm_y, and zone
+values represent correct UTM coordinates.  The program will generally still
+produce latitude/longitude coordinates, even for incorrect inputs.
+You can check that your original inputs were correct by using
+.I ll2utm
+to convert the latitude/longitude coordinates back into UTM coordinates.
+.PP
+The output takes the form of a single line, containing the latitude
+and longitude, separated by white space.
+The values are in decimal degrees, with latitudes south of the
+equator and longitudes west of the prime meridian being negative.
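+.PP
+For example (the coordinates below are purely illustrative), the command
+.PP
+utm2ll 383300 5316900 12
+.PP
+prints the corresponding latitude and longitude, in that order, as two
+decimal-degree values on a single line, using the default NAD-27 datum.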
+.PP
+If you provide just the "-L" option, the program will print some license
+information and exit.
+.PP
+Projections, and inverse projections, depend on defining an ellipsoid that
+approximates the shape of the earth (the reference ellipsoid) and defining
+reference coordinates (the datum) that allow measurements to be made.
+Different choices of the ellipsoid and datum can yield projections that differ by
+tens of meters.  There are a wide variety of choices, due to both
+the historical progression of measurement technology, and the
+desire to maximize accuracy over a given region (such as North America, or
+one of the United States).
+.PP
+This program defaults to the North American Datum of 1927 (NAD-27) with
+the Clarke Ellipsoid of 1866, since these appear to be appropriate for much
+of the freely-available data.
+The data are apparently in the process of being converted to the Geodetic
+Reference System 1980 (GRS-80) ellipsoid
+and NAD-83.  If you come across such data, you can specify "nad83"
+on the command line.
+The GTOPO30 data use the World Geodetic System 1984 (WGS-84) ellipsoid, which can be invoked by
+specifying "wgs84" on the command line.
+.SH SEE ALSO
+The
+.I ll2utm(1)
+command provides the inverse conversion.
+\" =========================================================================
+\" utm2ll.1 - The manual page for the utm2ll program.
+\" Copyright (c) 2000,2001  Fred M. Erickson
+\"
+\" This program is free software; you can redistribute it and/or modify
+\" it under the terms of the GNU General Public License as published by
+\" the Free Software Foundation; either version 2, or (at your option)
+\" any later version.
+\"
+\" This program is distributed in the hope that it will be useful,
+\" but WITHOUT ANY WARRANTY; without even the implied warranty of
+\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+\" GNU General Public License for more details.
+\"
+\" You should have received a copy of the GNU General Public License
+\" along with this program; if not, write to the Free Software
+\" Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+\" =========================================================================

Added: packages/drawmap/branches/upstream/current/utm2ll.c
===================================================================
--- packages/drawmap/branches/upstream/current/utm2ll.c	                        (rev 0)
+++ packages/drawmap/branches/upstream/current/utm2ll.c	2007-05-18 12:29:03 UTC (rev 834)
@@ -0,0 +1,143 @@
+/*
+ * =========================================================================
+ * utm2ll - A program to convert UTM coordinates to latitude/longitude coordinates
+ * Copyright (c) 2000  Fred M. Erickson
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2, or (at your option)
+ * any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
+ * =========================================================================
+ *
+ *
+ * Program to use Redfearn's formulas to convert UTM coordinates
+ * to latitude/longitude geographical coordinates.
+ *
+ * There aren't a lot of comments in this program because it is
+ * basically a wrapper that calls the appropriate conversion function
+ * in the file utilities.c.  See the comments there for a description
+ * of the conversion process.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>	/* for strcmp() */
+#include <sys/types.h>
+#include "drawmap.h"
+
+
+void
+license(void)
+{
+	fprintf(stderr, "This program is free software; you can redistribute it and/or modify\n");
+	fprintf(stderr, "it under the terms of the GNU General Public License as published by\n");
+	fprintf(stderr, "the Free Software Foundation; either version 2, or (at your option)\n");
+	fprintf(stderr, "any later version.\n\n");
+
+	fprintf(stderr, "This program is distributed in the hope that it will be useful,\n");
+	fprintf(stderr, "but WITHOUT ANY WARRANTY; without even the implied warranty of\n");
+	fprintf(stderr, "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n");
+	fprintf(stderr, "GNU General Public License for more details.\n\n");
+
+	fprintf(stderr, "You should have received a copy of the GNU General Public License\n");
+	fprintf(stderr, "along with this program; if not, write to the Free Software\n");
+	fprintf(stderr, "Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.\n");
+}
+
+main(int argc, char *argv[])
+{
+	double utm_x, utm_y, longitude, latitude;
+	long zone;
+	long dtype;
+	struct datum datum;
+
+	if ((argc != 4) && (argc != 5))  {
+		if ((argc == 2) && (argv[1][0] == '-') && (argv[1][1] == 'L'))  {
+			license();
+			exit(0);
+		}
+		fprintf(stderr, "Convert UTM coordinates to latitude/longitude coordinates.\n");
+		fprintf(stderr, "Usage:  %s utm_x utm_y zone [nad27 | nad83 | wgs84]\n", argv[0]);
+	fprintf(stderr, "The default is nad27.\n");
+		exit(0);
+	}
+	utm_x = atof(argv[1]);
+	utm_y = atof(argv[2]);
+	zone = atoi(argv[3]);
+	if (argc == 5)  {
+		if (strcmp(argv[4], "nad27") == 0)  {
+			dtype = 0;
+		}
+		else if (strcmp(argv[4], "nad83") == 0)  {
+			dtype = 1;
+		}
+		else if (strcmp(argv[4], "wgs84") == 0)  {
+			dtype = 2;
+		}
+		else  {
+			fprintf(stderr, "Unknown datum specified.\n");
+			fprintf(stderr, "Usage:  %s utm_x utm_y zone [nad27 | nad83 | wgs84]\n", argv[0]);
+			fprintf(stderr, "Default is nad27.\n");
+			exit(0);
+		}
+	}
+	else  {
+		dtype = 0;
+	}
+
+
+	if (dtype == 0)  {
+		/* Fill in the datum parameters for NAD-27. */
+		datum.a = NAD27_SEMIMAJOR;
+		datum.b = NAD27_SEMIMINOR;
+		datum.e_2 = NAD27_E_SQUARED;
+		datum.f_inv = NAD27_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = NAD27_A0;
+		datum.a2 = NAD27_A2;
+		datum.a4 = NAD27_A4;
+		datum.a6 = NAD27_A6;
+	}
+	else if (dtype == 1)  {
+		/* Fill in the datum parameters for NAD-83. */
+		datum.a = NAD83_SEMIMAJOR;
+		datum.b = NAD83_SEMIMINOR;
+		datum.e_2 = NAD83_E_SQUARED;
+		datum.f_inv = NAD83_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = NAD83_A0;
+		datum.a2 = NAD83_A2;
+		datum.a4 = NAD83_A4;
+		datum.a6 = NAD83_A6;
+	}
+	else  {
+		/* Fill in the datum parameters for WGS-84. */
+		datum.a = WGS84_SEMIMAJOR;
+		datum.b = WGS84_SEMIMINOR;
+		datum.e_2 = WGS84_E_SQUARED;
+		datum.f_inv = WGS84_F_INV;
+		datum.k0 = UTM_K0;
+		datum.a0 = WGS84_A0;
+		datum.a2 = WGS84_A2;
+		datum.a4 = WGS84_A4;
+		datum.a6 = WGS84_A6;
+	}
+
+
+	if (redfearn_inverse(&datum, utm_x, utm_y, zone, &latitude, &longitude) != 0)  {
+		fprintf(stderr, "error in input parameters.\n");
+		exit(0);
+	}
+
+//	fprintf(stdout, "(%.10g %.10g %d) ===> (%.10g %.10g)\n", utm_x, utm_y, zone, latitude, longitude);
+	fprintf(stdout, "%.10g %.10g\n", latitude, longitude);
+	exit(0);
+}




More information about the Pkg-grass-devel mailing list