[Git][debian-gis-team/python-pdal][upstream] New upstream version 2.1.0~rc1+ds

Bas Couwenberg gitlab at salsa.debian.org
Wed Oct 31 17:24:56 GMT 2018


Bas Couwenberg pushed to branch upstream at Debian GIS Project / python-pdal


Commits:
7bf8b61b by Bas Couwenberg at 2018-10-31T16:52:05Z
New upstream version 2.1.0~rc1+ds
- - - - -


16 changed files:

- CHANGES.txt
- PKG-INFO
- README.rst
- VERSION.txt
- pdal/PyArray.hpp
- + pdal/PyDimension.hpp
- pdal/PyPipeline.cpp
- pdal/PyPipeline.hpp
- pdal/__init__.py
- + pdal/array.py
- + pdal/dimension.py
- pdal/libpdalpython.cpp
- pdal/libpdalpython.pyx
- pdal/pipeline.py
- setup.py
- test/test_pipeline.py


Changes:

=====================================
CHANGES.txt
=====================================
@@ -1,3 +1,12 @@
 Changes
 ================================================================================
 
+2.0.0
+--------------------------------------------------------------------------------
+
+* PDAL Python extension is now in its own repository on its own release
+  schedule at https://github.com/PDAL/python
+
+* Extension now builds and works under PDAL OSGeo4W64 on Windows.
+
+


=====================================
PKG-INFO
=====================================
@@ -1,12 +1,13 @@
-Metadata-Version: 1.1
+Metadata-Version: 1.2
 Name: PDAL
-Version: 2.0.0
+Version: 2.1.0rc1
 Summary: Point cloud data processing
 Home-page: http://pdal.io
 Author: Howard Butler
 Author-email: howard at hobu.co
+Maintainer: Howard Butler
+Maintainer-email: howard at hobu.co
 License: BSD
-Description-Content-Type: UNKNOWN
 Description: ================================================================================
         PDAL
         ================================================================================
@@ -44,7 +45,7 @@ Description: ===================================================================
             import pdal
             pipeline = pdal.Pipeline(json)
             pipeline.validate() # check if our JSON and options were good
-            pipeline.loglevel = 9 #really noisy
+            pipeline.loglevel = 8 #really noisy
             count = pipeline.execute()
             arrays = pipeline.arrays
             metadata = pipeline.metadata
@@ -55,17 +56,32 @@ Description: ===================================================================
         .. _`schema`: http://www.pdal.io/dimensions.html
         .. _`metadata`: http://www.pdal.io/development/metadata.html
         
+        
+        .. image:: https://travis-ci.org/PDAL/python.svg?branch=master
+            :target: https://travis-ci.org/PDAL/python
+        
         Requirements
         ================================================================================
         
         * PDAL 1.7+
         * Python >=2.7 (including Python 3.x)
+        * Cython (eg :code:`pip install cython`)
+        * Packaging (eg :code:`pip install packaging`)
         
         
         
         Changes
         ================================================================================
         
+        2.0.0
+        --------------------------------------------------------------------------------
+        
+        * PDAL Python extension is now in its own repository on its own release
+          schedule at https://github.com/PDAL/python
+        
+        * Extension now builds and works under PDAL OSGeo4W64 on Windows.
+        
+        
         
 Keywords: point cloud spatial
 Platform: UNKNOWN
@@ -78,3 +94,4 @@ Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3
 Classifier: Topic :: Scientific/Engineering :: GIS
 Requires: Python (>=2.7)
+Requires: Numpy


=====================================
README.rst
=====================================
@@ -35,7 +35,7 @@ sorts it by the ``X`` dimension:
     import pdal
     pipeline = pdal.Pipeline(json)
     pipeline.validate() # check if our JSON and options were good
-    pipeline.loglevel = 9 #really noisy
+    pipeline.loglevel = 8 #really noisy
     count = pipeline.execute()
     arrays = pipeline.arrays
     metadata = pipeline.metadata
@@ -46,9 +46,15 @@ sorts it by the ``X`` dimension:
 .. _`schema`: http://www.pdal.io/dimensions.html
 .. _`metadata`: http://www.pdal.io/development/metadata.html
 
+
+.. image:: https://travis-ci.org/PDAL/python.svg?branch=master
+    :target: https://travis-ci.org/PDAL/python
+
 Requirements
 ================================================================================
 
 * PDAL 1.7+
 * Python >=2.7 (including Python 3.x)
+* Cython (eg :code:`pip install cython`)
+* Packaging (eg :code:`pip install packaging`)
 


=====================================
VERSION.txt
=====================================
@@ -1 +1 @@
-2.0.0
\ No newline at end of file
+2.1.0rc1
\ No newline at end of file


=====================================
pdal/PyArray.hpp
=====================================
@@ -66,8 +66,23 @@ class PDAL_DLL Array
 {
 public:
 
-    Array() : m_py_array(0)
-    {}
+    Array() : m_py_array(0), m_own_array(true)
+    {
+#undef NUMPY_IMPORT_ARRAY_RETVAL
+#define NUMPY_IMPORT_ARRAY_RETVAL
+        import_array();
+    }
+
+    Array(PyObject* array) : m_py_array(array), m_own_array(false)
+    {
+#undef NUMPY_IMPORT_ARRAY_RETVAL
+#define NUMPY_IMPORT_ARRAY_RETVAL
+        import_array();
+        if (!PyArray_Check(array))
+            throw pdal::pdal_error("pdal::python::Array constructor object is not a numpy array");
+        Py_XINCREF(array);
+
+    }
 
     ~Array()
     {
@@ -85,13 +100,13 @@ public:
         npy_intp* ndims = &mydims;
         std::vector<npy_intp> strides(dims.size());
 
-
         DataPtr pdata( new std::vector<uint8_t>(view->pointSize()* view->size(), 0));
 
-        PyArray_Descr *dtype(0);
+        PyArray_Descr *dtype = nullptr;
         PyObject * dtype_dict = (PyObject*)buildNumpyDescription(view);
         if (!dtype_dict)
             throw pdal_error("Unable to build numpy dtype description dictionary");
+
         int did_convert = PyArray_DescrConverter(dtype_dict, &dtype);
         if (did_convert == NPY_FAIL)
             throw pdal_error("Unable to build numpy dtype");
@@ -126,16 +141,22 @@ public:
     }
 
 
-    inline PyObject* getPythonArray() const { return m_py_array; }
-
+    inline PyObject* getPythonArray() const
+    {
+        return m_py_array;
+    }
 
 private:
 
     inline void cleanup()
     {
         PyObject* p = (PyObject*)(m_py_array);
+        if (m_own_array)
+        {
+            m_data_array.reset();
+        }
+
         Py_XDECREF(p);
-        m_data_array.reset();
     }
 
     inline PyObject* buildNumpyDescription(PointViewPtr view) const
@@ -170,6 +191,8 @@ private:
             Dimension::BaseType b = Dimension::base(t);
             if (b == Dimension::BaseType::Unsigned)
                 kind = "u";
+            else if (b == Dimension::BaseType::Signed)
+                kind = "i";
             else if (b == Dimension::BaseType::Floating)
                 kind = "f";
             else
@@ -201,8 +224,12 @@ private:
         return dict;
     }
 
+
+
+
     PyObject* m_py_array;
     std::unique_ptr<std::vector<uint8_t> > m_data_array;
+    bool m_own_array;
 
     Array& operator=(Array const& rhs);
 };


=====================================
pdal/PyDimension.hpp
=====================================
@@ -0,0 +1,97 @@
+/******************************************************************************
+* Copyright (c) 2018, Howard Butler (howard at hobu.co)
+*
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following
+* conditions are met:
+*
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above copyright
+*       notice, this list of conditions and the following disclaimer in
+*       the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the
+*       names of its contributors may be used to endorse or promote
+*       products derived from this software without specific prior
+*       written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+* OF SUCH DAMAGE.
+****************************************************************************/
+
+#pragma once
+
+#include <pdal/PointView.hpp>
+#include <pdal/Dimension.hpp>
+
+#include <algorithm>
+#include <vector>
+
+typedef struct Dimension
+{
+    std::string name;
+    std::string description;
+    std::string type;
+    int size;
+    std::string units;
+} Dimension;
+
+inline std::vector<Dimension> getValidDimensions()
+{
+    std::vector<Dimension> output;
+
+    int id = (int)pdal::Dimension::Id::Unknown + 1;
+
+    while(1)
+    {
+        pdal::Dimension::Id pid = (pdal::Dimension::Id)id;
+        std::string name(pdal::Dimension::name(pid));
+        if (name.empty())
+            break;
+
+        pdal::Dimension::Type t = pdal::Dimension::defaultType(pid);
+
+        Dimension d;
+        d.name = name;
+        d.description = pdal::Dimension::description(pid);
+        d.size = pdal::Dimension::size(t);
+
+        std::string kind("i");
+        pdal::Dimension::BaseType b = pdal::Dimension::base(t);
+        if (b == pdal::Dimension::BaseType::Unsigned)
+            kind = "u";
+        else if (b == pdal::Dimension::BaseType::Signed)
+            kind = "i";
+        else if (b == pdal::Dimension::BaseType::Floating)
+            kind = "f";
+        else
+        {
+            std::stringstream oss;
+            oss << "unable to map kind '" << kind <<"' to PDAL dimension type";
+            throw pdal::pdal_error(oss.str());
+        }
+        d.type = kind;
+
+
+        output.push_back(d);
+        id++;
+
+    }
+
+    return output;
+
+
+}


=====================================
pdal/PyPipeline.cpp
=====================================
@@ -46,14 +46,67 @@
 #include <numpy/arrayobject.h>
 
 #include "PyArray.hpp"
+#include <pdal/Stage.hpp>
+#include <pdal/PipelineWriter.hpp>
+#include <pdal/io/NumpyReader.hpp>
 
 namespace libpdalpython
 {
 
 using namespace pdal::python;
 
+Pipeline::Pipeline(std::string const& json, std::vector<Array*> arrays)
+{
+
+#ifndef _WIN32
+    ::dlopen("libpdal_base.so", RTLD_NOLOAD | RTLD_GLOBAL);
+    ::dlopen("libpdal_plugin_reader_numpy.so", RTLD_NOLOAD | RTLD_GLOBAL);
+#endif
+
+#undef NUMPY_IMPORT_ARRAY_RETVAL
+#define NUMPY_IMPORT_ARRAY_RETVAL
+    import_array();
+
+    m_executor = std::shared_ptr<pdal::PipelineExecutor>(new pdal::PipelineExecutor(json));
+
+    pdal::PipelineManager& manager = m_executor->getManager();
+
+    std::stringstream strm(json);
+    manager.readPipeline(strm);
+
+    pdal::Stage *r = manager.getStage();
+    if (!r)
+        throw pdal::pdal_error("pipeline had no stages!");
+
+    int counter = 1;
+    for (auto array: arrays)
+    {
+        // Create numpy reader for each array
+        pdal::Options options;
+        std::stringstream tag;
+        tag << "readers_numpy" << counter;
+        pdal::StageCreationOptions opts { "", "readers.numpy", nullptr, options, tag.str()};
+        pdal::Stage& reader = manager.makeReader(opts);
+
+        pdal::NumpyReader* np_reader = dynamic_cast<pdal::NumpyReader*>(&reader);
+        if (!np_reader)
+            throw pdal::pdal_error("couldn't cast reader!");
+
+        PyObject* parray = (PyObject*)array->getPythonArray();
+        if (!parray)
+            throw pdal::pdal_error("array was none!");
+
+        np_reader->setArray(parray);
+
+        r->setInput(reader);
+        counter++;
+
+    }
+
+    manager.validateStageOptions();
+}
+
 Pipeline::Pipeline(std::string const& json)
-    : m_executor(json)
 {
     // Make the symbols in pdal_base global so that they're accessible
     // to PDAL plugins.  Python dlopen's this extension with RTLD_LOCAL,
@@ -67,6 +120,8 @@ Pipeline::Pipeline(std::string const& json)
 #undef NUMPY_IMPORT_ARRAY_RETVAL
 #define NUMPY_IMPORT_ARRAY_RETVAL
     import_array();
+
+    m_executor = std::shared_ptr<pdal::PipelineExecutor>(new pdal::PipelineExecutor(json));
 }
 
 Pipeline::~Pipeline()
@@ -75,34 +130,34 @@ Pipeline::~Pipeline()
 
 void Pipeline::setLogLevel(int level)
 {
-    m_executor.setLogLevel(level);
+    m_executor->setLogLevel(level);
 }
 
 int Pipeline::getLogLevel() const
 {
-    return static_cast<int>(m_executor.getLogLevel());
+    return static_cast<int>(m_executor->getLogLevel());
 }
 
 int64_t Pipeline::execute()
 {
 
-    int64_t count = m_executor.execute();
+    int64_t count = m_executor->execute();
     return count;
 }
 
 bool Pipeline::validate()
 {
-    return m_executor.validate();
+    return m_executor->validate();
 }
 
 std::vector<Array *> Pipeline::getArrays() const
 {
     std::vector<Array *> output;
 
-    if (!m_executor.executed())
+    if (!m_executor->executed())
         throw python_error("call execute() before fetching arrays");
 
-    const pdal::PointViewSet& pvset = m_executor.getManagerConst().views();
+    const pdal::PointViewSet& pvset = m_executor->getManagerConst().views();
 
     for (auto i: pvset)
     {


=====================================
pdal/PyPipeline.hpp
=====================================
@@ -41,6 +41,8 @@
 
 #include <string>
 #include <sstream>
+#include <memory>
+
 #undef toupper
 #undef tolower
 #undef isspace
@@ -65,26 +67,27 @@ public:
 
 class Pipeline {
 public:
-    Pipeline(std::string const& xml);
+    Pipeline(std::string const& json);
+    Pipeline(std::string const& json, std::vector<pdal::python::Array*> arrays);
     ~Pipeline();
 
     int64_t execute();
     bool validate();
     inline std::string getPipeline() const
     {
-        return m_executor.getPipeline();
+        return m_executor->getPipeline();
     }
     inline std::string getMetadata() const
     {
-        return m_executor.getMetadata();
+        return m_executor->getMetadata();
     }
     inline std::string getSchema() const
     {
-        return m_executor.getSchema();
+        return m_executor->getSchema();
     }
     inline std::string getLog() const
     {
-        return m_executor.getLog();
+        return m_executor->getLog();
     }
     std::vector<pdal::python::Array *> getArrays() const;
 
@@ -92,7 +95,7 @@ public:
     int getLogLevel() const;
 
 private:
-    pdal::PipelineExecutor m_executor;
+    std::shared_ptr<pdal::PipelineExecutor> m_executor;
 };
 
 }


=====================================
pdal/__init__.py
=====================================
@@ -1,3 +1,5 @@
-__version__='2.0.0'
+__version__='2.1.0rc1'
 
 from .pipeline import Pipeline
+from .array import Array
+from .dimension import dimensions


=====================================
pdal/array.py
=====================================
@@ -0,0 +1,8 @@
+import numpy as np
+from pdal import libpdalpython
+
+class Array(object):
+    """A Numpy Array that can speak PDAL"""
+
+    def __init__(self, data):
+        self.p = libpdalpython.PyArray(data)


=====================================
pdal/dimension.py
=====================================
@@ -0,0 +1,6 @@
+import numpy as np
+from pdal import libpdalpython
+from pdal.libpdalpython import getDimensions
+
+dimensions = getDimensions()
+


=====================================
pdal/libpdalpython.cpp
=====================================
The diff for this file was not included because it is too large.

=====================================
pdal/libpdalpython.pyx
=====================================
@@ -1,4 +1,5 @@
 # distutils: language = c++
+# cython: c_string_type=unicode, c_string_encoding=utf8
 
 from libcpp.vector cimport vector
 from libcpp.string cimport string
@@ -14,11 +15,13 @@ from cython.operator cimport dereference as deref, preincrement as inc
 
 cdef extern from "PyArray.hpp" namespace "pdal::python":
     cdef cppclass Array:
+        Array(object) except +
         void* getPythonArray() except+
 
 cdef extern from "PyPipeline.hpp" namespace "libpdalpython":
     cdef cppclass Pipeline:
         Pipeline(const char* ) except +
+        Pipeline(const char*, vector[Array*]& ) except +
         int64_t execute() except +
         bool validate() except +
         string getPipeline() except +
@@ -29,26 +32,86 @@ cdef extern from "PyPipeline.hpp" namespace "libpdalpython":
         int getLogLevel()
         void setLogLevel(int)
 
+cdef class PyArray:
+    cdef Array *thisptr
+    def __cinit__(self, object array):
+        self.thisptr = new Array(array)
+    def __dealloc__(self):
+        del self.thisptr
+
+cdef extern from "PyDimension.hpp":
+    ctypedef struct Dimension:
+        string name;
+        string description;
+        int size;
+        string type;
+##         string units; // Not defined by PDAL yet
+
+    cdef vector[Dimension] getValidDimensions() except +
+
+
+def getDimensions():
+        cdef vector[Dimension] c_dims;
+        c_dims = getValidDimensions()
+        output = []
+        cdef vector[Dimension].iterator it = c_dims.begin()
+        while it != c_dims.end():
+            ptr = deref(it)
+            d = {}
+            d['name'] = ptr.name
+            d['description'] = ptr.description
+            kind = ptr.type + str(ptr.size)
+            d['dtype'] = np.dtype(kind)
+            ptr = deref(it)
+            output.append(d)
+            inc(it)
+        return output
+
+
 cdef class PyPipeline:
     cdef Pipeline *thisptr      # hold a c++ instance which we're wrapping
-    def __cinit__(self, unicode json):
-        cdef char* x
+
+
+    def __cinit__(self, unicode json, list arrays=None):
+        cdef char* x = NULL
+        cdef int n_arrays;
+        if arrays:
+            n_arrays = len(arrays)
+
+        cdef vector[Array*] c_arrays;
+        cdef np.ndarray np_array;
+        cdef Array* a
+
+        if arrays is not None:
+            for array in arrays:
+                a = new Array(array)
+                c_arrays.push_back(a)
+
         if PY_MAJOR_VERSION >= 3:
-            py_byte_string = json.encode('UTF-8')
-            x= py_byte_string
-            self.thisptr = new Pipeline(x)
+            if arrays:
+                self.thisptr = new Pipeline(json.encode('UTF-8'), c_arrays)
+            else:
+                self.thisptr = new Pipeline(json.encode('UTF-8'))
         else:
-            self.thisptr = new Pipeline(json)
+            if arrays:
+                self.thisptr = new Pipeline(json, c_arrays)
+            else:
+                self.thisptr = new Pipeline(json)
+#        if arrays:
+#            self.thisptr = new Pipeline(json.encode('UTF-8'), c_arrays)
+#        else:
+#            self.thisptr = new Pipeline(json.encode('UTF-8'))
+
     def __dealloc__(self):
         del self.thisptr
 
     property pipeline:
         def __get__(self):
-            return self.thisptr.getPipeline().decode('UTF-8')
+            return self.thisptr.getPipeline()
 
     property metadata:
         def __get__(self):
-            return self.thisptr.getMetadata().decode('UTF-8')
+            return self.thisptr.getMetadata()
 
     property loglevel:
         def __get__(self):
@@ -59,13 +122,13 @@ cdef class PyPipeline:
     property log:
         def __get__(self):
 
-            return self.thisptr.getLog().decode('UTF-8')
+            return self.thisptr.getLog()
 
     property schema:
         def __get__(self):
             import json
 
-            j = self.thisptr.getSchema().decode('UTF-8')
+            j = self.thisptr.getSchema()
             return json.loads(j)
 
     property arrays:


=====================================
pdal/pipeline.py
=====================================
@@ -1,16 +1,17 @@
 
 from pdal import libpdalpython
+import numpy as np
 
 class Pipeline(object):
     """A PDAL pipeline object, defined by JSON. See http://www.pdal.io/pipeline.html for more
     information on how to define one"""
 
-    def __init__(self, json):
-        if isinstance(json, str):
-            data = json
+    def __init__(self, json, arrays=None):
+
+        if arrays:
+            self.p = libpdalpython.PyPipeline(json, arrays)
         else:
-            data = json.decode('UTF-8')
-        self.p = libpdalpython.PyPipeline(data)
+            self.p = libpdalpython.PyPipeline(json)
 
     def get_metadata(self):
         return self.p.metadata


=====================================
setup.py
=====================================
@@ -110,7 +110,7 @@ extra_compile_args = []
 
 if os.name in ['nt']:
     library_dirs = ['c:/OSGeo4W64/lib']
-    libraries = ['pdalcpp','pdal_util','ws2_32']
+    libraries = ['pdalcpp','pdal_plugin_reader_numpy','pdal_util','ws2_32']
     extra_compile_args = ['/DNOMINMAX',]
 
 from setuptools.extension import Extension as DistutilsExtension
@@ -154,11 +154,15 @@ include_dirs.append(numpy.get_include())
 if os.name != 'nt':
     extra_compile_args = ['-std=c++11','-Wno-unknown-pragmas']
 
-DEBUG=False
+if platform.system() == 'Darwin':
+    extra_link_args.append('-Wl,-rpath,'+library_dirs[0])
+
+DEBUG=True
 if DEBUG:
     if os.name != 'nt':
         extra_compile_args += ['-g','-O0']
 
+libraries.append('pdal_plugin_reader_numpy')
 sources=['pdal/libpdalpython'+ext, "pdal/PyPipeline.cpp"  ]
 extensions = [DistutilsExtension("*",
                                    sources,
@@ -174,7 +178,7 @@ if USE_CYTHON and "clean" not in sys.argv:
 setup_args = dict(
     name                = 'PDAL',
     version             = str(module_version),
-    requires            = ['Python (>=2.7)', ],
+    requires            = ['Python (>=2.7)', 'Numpy'],
     description         = 'Point cloud data processing',
     license             = 'BSD',
     keywords            = 'point cloud spatial',
@@ -199,7 +203,7 @@ setup_args = dict(
         'Topic :: Scientific/Engineering :: GIS',
     ],
     cmdclass           = {},
-    install_requires   = ['numpy', 'packaging'],
+    install_requires   = ['numpy', 'packaging', 'cython'],
 )
 setup(ext_modules=extensions, **setup_args)
 


=====================================
test/test_pipeline.py
=====================================
@@ -1,6 +1,7 @@
 import unittest
 import pdal
 import os
+import numpy as np
 
 DATADIRECTORY = "./test/data"
 
@@ -16,9 +17,9 @@ bad_json = u"""
 }
 """
 
-print (os.path.abspath(os.path.join(DATADIRECTORY, 'sort.json')))
 
-class TestPipeline(unittest.TestCase):
+
+class PDALTest(unittest.TestCase):
 
     def fetch_json(self, filename):
         import os
@@ -28,55 +29,60 @@ class TestPipeline(unittest.TestCase):
             output = f.read().decode('UTF-8')
         return output
 
+class TestPipeline(PDALTest):
+#
     @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
                          "missing test data")
     def test_construction(self):
         """Can we construct a PDAL pipeline"""
         json = self.fetch_json('sort.json')
         r = pdal.Pipeline(json)
-
+#
     @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
                          "missing test data")
     def test_execution(self):
         """Can we execute a PDAL pipeline"""
         x = self.fetch_json('sort.json')
         r = pdal.Pipeline(x)
+        r.validate()
         r.execute()
         self.assertGreater(len(r.pipeline), 200)
-
+#
     def test_validate(self):
         """Do we complain with bad pipelines"""
         r = pdal.Pipeline(bad_json)
         with self.assertRaises(RuntimeError):
             r.validate()
-
+#
     @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
                          "missing test data")
     def test_array(self):
         """Can we fetch PDAL data as a numpy array"""
         json = self.fetch_json('sort.json')
         r = pdal.Pipeline(json)
+        r.validate()
         r.execute()
         arrays = r.arrays
         self.assertEqual(len(arrays), 1)
-
+#
         a = arrays[0]
         self.assertAlmostEqual(a[0][0], 635619.85, 7)
         self.assertAlmostEqual(a[1064][2], 456.92, 7)
-
+#
     @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
                          "missing test data")
     def test_metadata(self):
         """Can we fetch PDAL metadata"""
         json = self.fetch_json('sort.json')
         r = pdal.Pipeline(json)
+        r.validate()
         r.execute()
         metadata = r.metadata
         import json
         j = json.loads(metadata)
-        self.assertEqual(j["metadata"]["readers.las"]["count"], 1065)
-
-
+        self.assertEqual(j["metadata"]["readers.las"][0]["count"], 1065)
+#
+#
     @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
                          "missing test data")
     def test_no_execute(self):
@@ -85,7 +91,7 @@ class TestPipeline(unittest.TestCase):
         r = pdal.Pipeline(json)
         with self.assertRaises(RuntimeError):
             r.arrays
-
+#
     @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'reproject.json')),
                          "missing test data")
     def test_logging(self):
@@ -93,32 +99,69 @@ class TestPipeline(unittest.TestCase):
         json = self.fetch_json('reproject.json')
         r = pdal.Pipeline(json)
         r.loglevel = 8
+        r.validate()
         count = r.execute()
         self.assertEqual(count, 789)
         self.assertEqual(r.log.split()[0], '(pypipeline')
-
+#
     @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
                          "missing test data")
     def test_schema(self):
         """Fetching a schema works"""
         json = self.fetch_json('sort.json')
         r = pdal.Pipeline(json)
+        r.validate()
         r.execute()
         self.assertEqual(r.schema['schema']['dimensions'][0]['name'], 'X')
-
+#
     @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'chip.json')),
                          "missing test data")
     def test_merged_arrays(self):
         """Can we fetch multiple point views from merged PDAL data """
         json = self.fetch_json('chip.json')
         r = pdal.Pipeline(json)
+        r.validate()
         r.execute()
         arrays = r.arrays
         self.assertEqual(len(arrays), 43)
+#
+class TestArrayLoad(PDALTest):
+
+    def test_merged_arrays(self):
+        """Can we load data from a list of arrays to PDAL"""
+        data = np.load(os.path.join(DATADIRECTORY, 'perlin.npy'))
+
+        arrays = [data, data, data]
+
+        json = self.fetch_json('chip.json')
+        chip =u"""{
+  "pipeline":[
+    {
+      "type":"filters.range",
+      "limits":"Intensity[0:0.10]"
+    }
+  ]
+}"""
+
+        p = pdal.Pipeline(chip, arrays)
+        p.loglevel = 8
+        count = p.execute()
+        arrays = p.arrays
+        self.assertEqual(len(arrays), 3)
+
+        data = arrays[0]
+        self.assertEqual(len(data), 1836)
+        self.assertEqual(sum([len(i) for i in arrays]), 3*1836)
+
+class TestDimensions(PDALTest):
+    def test_fetch_dimensions(self):
+        """Ask PDAL for its valid dimensions list"""
+        dims = pdal.dimensions
+        self.assertEqual(len(dims), 72)
 
 def test_suite():
     return unittest.TestSuite(
-        [TestXML])
+        [TestPipeline])
 
 if __name__ == '__main__':
     unittest.main()



View it on GitLab: https://salsa.debian.org/debian-gis-team/python-pdal/commit/7bf8b61b4f70b9651624776e14824fca32045122

-- 
View it on GitLab: https://salsa.debian.org/debian-gis-team/python-pdal/commit/7bf8b61b4f70b9651624776e14824fca32045122
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20181031/07d33896/attachment-0001.html>


More information about the Pkg-grass-devel mailing list