[tika] 07/23: Drop 03-ignore-netcdf.patch
Markus Koschany
apo-guest at moszumanska.debian.org
Mon Nov 30 20:27:40 UTC 2015
This is an automated email from the git hooks/post-receive script.
apo-guest pushed a commit to branch master
in repository tika.
commit 581e3ea412f393e5367108a5ce8e41f78b61675a
Author: Markus Koschany <apo at debian.org>
Date: Mon Nov 30 15:48:55 2015 +0000
Drop 03-ignore-netcdf.patch
---
debian/patches/03-ignore-netcdf.patch | 245 ----------------------------------
debian/patches/series | 1 -
2 files changed, 246 deletions(-)
diff --git a/debian/patches/03-ignore-netcdf.patch b/debian/patches/03-ignore-netcdf.patch
deleted file mode 100644
index 0b50388..0000000
--- a/debian/patches/03-ignore-netcdf.patch
+++ /dev/null
@@ -1,245 +0,0 @@
-Description: Remove the classes using the netcdf library which isn't in Debian yet
-Author: Emmanuel Bourg <ebourg at apache.org>
-Forwarded: not-needed
-
---- a/tika-parsers/src/main/java/org/apache/tika/parser/netcdf/NetCDFParser.java
-+++ /dev/null
-@@ -1,115 +0,0 @@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one or more
-- * contributor license agreements. See the NOTICE file distributed with
-- * this work for additional information regarding copyright ownership.
-- * The ASF licenses this file to You under the Apache License, Version 2.0
-- * (the "License"); you may not use this file except in compliance with
-- * the License. You may obtain a copy of the License at
-- *
-- * http://www.apache.org/licenses/LICENSE-2.0
-- *
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--package org.apache.tika.parser.netcdf;
--
--//JDK imports
--import java.io.ByteArrayOutputStream;
--import java.io.IOException;
--import java.io.InputStream;
--import java.util.Collections;
--import java.util.Set;
--
--import org.apache.tika.exception.TikaException;
--import org.apache.tika.io.IOUtils;
--import org.apache.tika.metadata.Metadata;
--import org.apache.tika.metadata.Property;
--import org.apache.tika.metadata.TikaCoreProperties;
--import org.apache.tika.mime.MediaType;
--import org.apache.tika.parser.AbstractParser;
--import org.apache.tika.parser.ParseContext;
--import org.apache.tika.parser.Parser;
--import org.apache.tika.sax.XHTMLContentHandler;
--import org.xml.sax.ContentHandler;
--import org.xml.sax.SAXException;
--
--import ucar.nc2.Attribute;
--import ucar.nc2.NetcdfFile;
--
--/**
-- * A {@link Parser} for <a
-- * href="http://www.unidata.ucar.edu/software/netcdf/index.html">NetCDF</a>
-- * files using the UCAR, MIT-licensed <a
-- * href="http://www.unidata.ucar.edu/software/netcdf-java/">NetCDF for Java</a>
-- * API.
-- */
--public class NetCDFParser extends AbstractParser {
--
-- /** Serial version UID */
-- private static final long serialVersionUID = -5940938274907708665L;
--
-- private final Set<MediaType> SUPPORTED_TYPES =
-- Collections.singleton(MediaType.application("x-netcdf"));
--
-- /*
-- * (non-Javadoc)
-- *
-- * @see
-- * org.apache.tika.parser.Parser#getSupportedTypes(org.apache.tika.parser
-- * .ParseContext)
-- */
-- public Set<MediaType> getSupportedTypes(ParseContext context) {
-- return SUPPORTED_TYPES;
-- }
--
-- /*
-- * (non-Javadoc)
-- *
-- * @see org.apache.tika.parser.Parser#parse(java.io.InputStream,
-- * org.xml.sax.ContentHandler, org.apache.tika.metadata.Metadata,
-- * org.apache.tika.parser.ParseContext)
-- */
-- public void parse(InputStream stream, ContentHandler handler,
-- Metadata metadata, ParseContext context) throws IOException,
-- SAXException, TikaException {
-- ByteArrayOutputStream os = new ByteArrayOutputStream();
-- IOUtils.copy(stream, os);
--
-- String name = metadata.get(Metadata.RESOURCE_NAME_KEY);
-- if (name == null) {
-- name = "";
-- }
--
-- try {
-- NetcdfFile ncFile = NetcdfFile.openInMemory(name, os.toByteArray());
--
-- // first parse out the set of global attributes
-- for (Attribute attr : ncFile.getGlobalAttributes()) {
-- Property property = resolveMetadataKey(attr.getName());
-- if (attr.getDataType().isString()) {
-- metadata.add(property, attr.getStringValue());
-- } else if (attr.getDataType().isNumeric()) {
-- int value = attr.getNumericValue().intValue();
-- metadata.add(property, String.valueOf(value));
-- }
-- }
-- } catch (IOException e) {
-- throw new TikaException("NetCDF parse error", e);
-- }
--
-- XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
-- xhtml.startDocument();
-- xhtml.endDocument();
-- }
--
-- private Property resolveMetadataKey(String localName) {
-- if ("title".equals(localName)) {
-- return TikaCoreProperties.TITLE;
-- }
-- return Property.internalText(localName);
-- }
--
--}
---- a/tika-parsers/src/main/java/org/apache/tika/parser/hdf/HDFParser.java
-+++ /dev/null
-@@ -1,120 +0,0 @@
--/*
-- * Licensed to the Apache Software Foundation (ASF) under one or more
-- * contributor license agreements. See the NOTICE file distributed with
-- * this work for additional information regarding copyright ownership.
-- * The ASF licenses this file to You under the Apache License, Version 2.0
-- * (the "License"); you may not use this file except in compliance with
-- * the License. You may obtain a copy of the License at
-- *
-- * http://www.apache.org/licenses/LICENSE-2.0
-- *
-- * Unless required by applicable law or agreed to in writing, software
-- * distributed under the License is distributed on an "AS IS" BASIS,
-- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- * See the License for the specific language governing permissions and
-- * limitations under the License.
-- */
--
--package org.apache.tika.parser.hdf;
--
--//JDK imports
--import java.io.ByteArrayOutputStream;
--import java.io.IOException;
--import java.io.InputStream;
--import java.util.Collections;
--import java.util.Set;
--
--import org.apache.tika.exception.TikaException;
--import org.apache.tika.io.IOUtils;
--import org.apache.tika.metadata.Metadata;
--import org.apache.tika.mime.MediaType;
--import org.apache.tika.parser.AbstractParser;
--import org.apache.tika.parser.ParseContext;
--import org.apache.tika.parser.netcdf.NetCDFParser;
--import org.apache.tika.sax.XHTMLContentHandler;
--import org.xml.sax.ContentHandler;
--import org.xml.sax.SAXException;
--
--import ucar.nc2.Attribute;
--import ucar.nc2.Group;
--import ucar.nc2.NetcdfFile;
--
--/**
-- *
-- * Since the {@link NetCDFParser} depends on the <a
-- * href="http://www.unidata.ucar.edu/software/netcdf-java" >NetCDF-Java</a> API,
-- * we are able to use it to parse HDF files as well. See <a href=
-- * "http://www.unidata.ucar.edu/software/netcdf-java/formats/FileTypes.html"
-- * >this link</a> for more information.
-- */
--public class HDFParser extends AbstractParser {
--
-- /** Serial version UID */
-- private static final long serialVersionUID = 1091208208003437549L;
--
-- private static final Set<MediaType> SUPPORTED_TYPES =
-- Collections.singleton(MediaType.application("x-hdf"));
--
-- /*
-- * (non-Javadoc)
-- *
-- * @see
-- * org.apache.tika.parser.netcdf.NetCDFParser#getSupportedTypes(org.apache
-- * .tika.parser.ParseContext)
-- */
-- public Set<MediaType> getSupportedTypes(ParseContext context) {
-- return SUPPORTED_TYPES;
-- }
--
-- /*
-- * (non-Javadoc)
-- *
-- * @see
-- * org.apache.tika.parser.netcdf.NetCDFParser#parse(java.io.InputStream,
-- * org.xml.sax.ContentHandler, org.apache.tika.metadata.Metadata,
-- * org.apache.tika.parser.ParseContext)
-- */
-- public void parse(InputStream stream, ContentHandler handler,
-- Metadata metadata, ParseContext context) throws IOException,
-- SAXException, TikaException {
-- ByteArrayOutputStream os = new ByteArrayOutputStream();
-- IOUtils.copy(stream, os);
--
-- String name = metadata.get(Metadata.RESOURCE_NAME_KEY);
-- if (name == null) {
-- name = "";
-- }
-- try {
-- NetcdfFile ncFile = NetcdfFile.openInMemory(name, os.toByteArray());
-- unravelStringMet(ncFile, null, metadata);
-- } catch (IOException e) {
-- throw new TikaException("HDF parse error", e);
-- }
--
-- XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
-- xhtml.startDocument();
-- xhtml.endDocument();
-- }
--
-- protected void unravelStringMet(NetcdfFile ncFile, Group group, Metadata met) {
-- if (group == null) {
-- group = ncFile.getRootGroup();
-- }
--
-- // unravel its string attrs
-- for (Attribute attribute : group.getAttributes()) {
-- if (attribute.isString()) {
-- met.add(attribute.getName(), attribute.getStringValue());
-- } else {
-- // try and cast its value to a string
-- met.add(attribute.getName(), String.valueOf(attribute
-- .getNumericValue()));
-- }
-- }
--
-- for (Group g : group.getGroups()) {
-- unravelStringMet(ncFile, g, met);
-- }
-- }
--
--}
diff --git a/debian/patches/series b/debian/patches/series
index 8a50af8..21b7949 100644
--- a/debian/patches/series
+++ b/debian/patches/series
@@ -1,4 +1,3 @@
01-jar-packaging.patch
-03-ignore-netcdf.patch
05-osgi-compatibility.patch
06-optional-parser-dependencies.patch
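
Note (not part of the commit): dropping 03-ignore-netcdf.patch means the NetCDFParser and HDFParser classes shown above are kept in the Debian build again. As a minimal sketch, and only assuming the UCAR netcdf-java library is on the classpath, the restored parser could be exercised directly like this; the file name "example.nc" and the class name NetCDFParserDemo are placeholders, not anything from the package:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    import org.apache.tika.metadata.Metadata;
    import org.apache.tika.parser.ParseContext;
    import org.apache.tika.parser.netcdf.NetCDFParser;
    import org.apache.tika.sax.BodyContentHandler;

    public class NetCDFParserDemo {
        public static void main(String[] args) throws Exception {
            NetCDFParser parser = new NetCDFParser();
            Metadata metadata = new Metadata();
            // The parser reads RESOURCE_NAME_KEY when opening the data in memory.
            metadata.set(Metadata.RESOURCE_NAME_KEY, "example.nc");

            try (InputStream stream = Files.newInputStream(Paths.get("example.nc"))) {
                // parse() copies the stream, opens it with NetcdfFile.openInMemory()
                // and maps the global attributes onto the Tika metadata object.
                parser.parse(stream, new BodyContentHandler(), metadata, new ParseContext());
            }

            // Print the extracted global attributes.
            for (String name : metadata.names()) {
                System.out.println(name + " = " + metadata.get(name));
            }
        }
    }
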
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-java/tika.git