[pysal] 05/06: Drop patches, applied upstream.
From: Bas Couwenberg <sebastic at debian.org>
Date: Sun Sep 24 09:45:26 UTC 2017
This is an automated email from the git hooks/post-receive script.
sebastic pushed a commit to branch master
in repository pysal.
commit a7c2761d46ea844115e4f4ef6b893dbe8172e69c
Author: Bas Couwenberg <sebastic at xs4all.nl>
Date: Sun Sep 24 11:21:51 2017 +0200
Drop patches, applied upstream.
---
debian/changelog | 1 +
.../fix-for-numpy-bump-induced-breakage.patch | 346 ---------------------
debian/patches/series | 2 -
debian/patches/unusual-interpreter.patch | 12 -
4 files changed, 1 insertion(+), 360 deletions(-)
diff --git a/debian/changelog b/debian/changelog
index 72d0f17..afb0af0 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -3,6 +3,7 @@ pysal (1.14.2-1) UNRELEASED; urgency=medium
* Team upload.
* New upstream release.
* Bump Standards-Version to 4.1.0, no changes.
+ * Drop patches, applied upstream.
-- Bas Couwenberg <sebastic at debian.org> Sun, 24 Sep 2017 11:17:32 +0200
diff --git a/debian/patches/fix-for-numpy-bump-induced-breakage.patch b/debian/patches/fix-for-numpy-bump-induced-breakage.patch
deleted file mode 100644
index cf12480..0000000
--- a/debian/patches/fix-for-numpy-bump-induced-breakage.patch
+++ /dev/null
@@ -1,346 +0,0 @@
-Description: Fix for numpy bump induced breakage.
- - casting index to int
- - casting to ints
- - fix for numpy bump (casting and reshape behavior)
-Author: Serge Rey <sjsrey at gmail.com>
-Bug: https://github.com/pysal/pysal/issues/896
-Bug-Debian: https://bugs.debian.org/848783
-
---- a/pysal/esda/smoothing.py
-+++ b/pysal/esda/smoothing.py
-@@ -555,11 +555,11 @@ def assuncao_rate(e, b):
- class _Smoother(object):
- """
- This is a helper class that implements things that all smoothers should do.
-- Right now, the only thing that we need to propagate is the by_col function.
-+ Right now, the only thing that we need to propagate is the by_col function.
-
- TBQH, most of these smoothers should be functions, not classes (aside from
- maybe headbanging triples), since they're literally only inits + one
-- attribute.
-+ attribute.
- """
- def __init__(self):
- pass
-@@ -567,7 +567,7 @@ class _Smoother(object):
- @classmethod
- def by_col(cls, df, e,b, inplace=False, **kwargs):
- """
-- Compute smoothing by columns in a dataframe.
-+ Compute smoothing by columns in a dataframe.
-
- Parameters
- -----------
-@@ -582,15 +582,15 @@ class _Smoother(object):
- inplace : bool
- a flag denoting whether to output a copy of `df` with the
- relevant smoothed columns appended, or to append the columns
-- directly to `df` itself.
-+ directly to `df` itself.
- **kwargs: optional keyword arguments
- optional keyword options that are passed directly to the
-- smoother.
-+ smoother.
-
- Returns
- ---------
- a copy of `df` containing the columns. Or, if `inplace`, this returns
-- None, but implicitly adds columns to `df`.
-+ None, but implicitly adds columns to `df`.
- """
- if not inplace:
- new = df.copy()
-@@ -718,13 +718,13 @@ class Empirical_Bayes(_Smoother):
- class _Spatial_Smoother(_Smoother):
- """
- This is a helper class that implements things that all the things that
-- spatial smoothers should do.
-+ spatial smoothers should do.
- .
-- Right now, the only thing that we need to propagate is the by_col function.
-+ Right now, the only thing that we need to propagate is the by_col function.
-
- TBQH, most of these smoothers should be functions, not classes (aside from
- maybe headbanging triples), since they're literally only inits + one
-- attribute.
-+ attribute.
- """
- def __init__(self):
- pass
-@@ -732,7 +732,7 @@ class _Spatial_Smoother(_Smoother):
- @classmethod
- def by_col(cls, df, e,b, w=None, inplace=False, **kwargs):
- """
-- Compute smoothing by columns in a dataframe.
-+ Compute smoothing by columns in a dataframe.
-
- Parameters
- -----------
-@@ -748,19 +748,19 @@ class _Spatial_Smoother(_Smoother):
- the spatial weights object or objects to use with the
- event-population pairs. If not provided and a weights object
- is in the dataframe's metadata, that weights object will be
-- used.
-+ used.
- inplace : bool
- a flag denoting whether to output a copy of `df` with the
- relevant smoothed columns appended, or to append the columns
-- directly to `df` itself.
-+ directly to `df` itself.
- **kwargs: optional keyword arguments
- optional keyword options that are passed directly to the
-- smoother.
-+ smoother.
-
- Returns
- ---------
- a copy of `df` containing the columns. Or, if `inplace`, this returns
-- None, but implicitly adds columns to `df`.
-+ None, but implicitly adds columns to `df`.
- """
- if not inplace:
- new = df.copy()
-@@ -1072,12 +1072,12 @@ class Age_Adjusted_Smoother(_Spatial_Smo
- r = direct_age_standardization(e_n, b_n, s, w.n, alpha=alpha)
- self.r = np.array([i[0] for i in r])
- w.transform = 'o'
--
-+
- @_requires('pandas')
- @classmethod
- def by_col(cls, df, e,b, w=None, s=None, **kwargs):
- """
-- Compute smoothing by columns in a dataframe.
-+ Compute smoothing by columns in a dataframe.
-
- Parameters
- -----------
-@@ -1093,22 +1093,22 @@ class Age_Adjusted_Smoother(_Spatial_Smo
- the spatial weights object or objects to use with the
- event-population pairs. If not provided and a weights object
- is in the dataframe's metadata, that weights object will be
-- used.
-+ used.
- s : string or list of strings
- the name or names of columns to use as a standard population
-- variable for the events `e` and at-risk populations `b`.
-+ variable for the events `e` and at-risk populations `b`.
- inplace : bool
- a flag denoting whether to output a copy of `df` with the
- relevant smoothed columns appended, or to append the columns
-- directly to `df` itself.
-+ directly to `df` itself.
- **kwargs: optional keyword arguments
- optional keyword options that are passed directly to the
-- smoother.
-+ smoother.
-
- Returns
- ---------
- a copy of `df` containing the columns. Or, if `inplace`, this returns
-- None, but implicitly adds columns to `df`.
-+ None, but implicitly adds columns to `df`.
- """
- if s is None:
- raise Exception('Standard population variable "s" must be supplied.')
-@@ -1326,7 +1326,7 @@ class Spatial_Median_Rate(_Spatial_Smoot
- if not w.id_order_set:
- raise ValueError("w id_order must be set to align with the order of e and b")
- e = np.asarray(e).flatten()
-- b = np.asarray(b).flatten()
-+ b = np.asarray(b).flatten()
- self.r = e * 1.0 / b
- self.aw, self.w = aw, w
- while iteration:
-@@ -1469,13 +1469,13 @@ class Spatial_Filtering(_Smoother):
- b_n_f = b_n[[0]]
- self.r.append(e_n_f[-1] * 1.0 / b_n_f[-1])
- self.r = np.array(self.r)
--
-+
- @_requires('pandas')
- @classmethod
- def by_col(cls, df, e, b, x_grid, y_grid, geom_col='geometry', **kwargs):
- """
- Compute smoothing by columns in a dataframe. The bounding box and point
-- information is computed from the geometry column.
-+ information is computed from the geometry column.
-
- Parameters
- -----------
-@@ -1493,13 +1493,13 @@ class Spatial_Filtering(_Smoother):
- number of grid cells to use along the y-axis
- geom_col: string
- the name of the column in the dataframe containing the
-- geometry information.
-+ geometry information.
- **kwargs: optional keyword arguments
- optional keyword options that are passed directly to the
-- smoother.
-+ smoother.
- Returns
- ---------
-- a new dataframe of dimension (x_grid*y_grid, 3), containing the
-+ a new dataframe of dimension (x_grid*y_grid, 3), containing the
- coordinates of the grid cells and the rates associated with those grid
- cells.
- """
-@@ -1525,8 +1525,8 @@ class Spatial_Filtering(_Smoother):
- grid = np.asarray(r.grid).reshape(-1,2)
- name = '_'.join(('-'.join((ename, bname)), cls.__name__.lower()))
- colnames = ('_'.join((name, suffix)) for suffix in ['X', 'Y', 'R'])
-- items = [(name, col) for name,col in zip(colnames, [grid[:,0],
-- grid[:,1],
-+ items = [(name, col) for name,col in zip(colnames, [grid[:,0],
-+ grid[:,1],
- r.r])]
- res.append(pd.DataFrame.from_items(items))
- outdf = pd.concat(res)
-@@ -1821,9 +1821,9 @@ class Headbanging_Median_Rate(object):
- if hasattr(self, 'extra') and id in self.extra:
- extra = self.extra
- trp_r = r[list(triples[0])]
-- # observed rate
-+ # observed rate
- # plus difference in rate scaled by ratio of extrapolated distance
-- # & observed distance.
-+ # & observed distance.
- trp_r[-1] = trp_r[0] + (trp_r[0] - trp_r[-1]) * (
- extra[id][-1] * 1.0 / extra[id][1])
- trp_r = sorted(trp_r)
-@@ -1852,14 +1852,14 @@ class Headbanging_Median_Rate(object):
- trp_r.sort(order='r')
- lowest.append(trp_r['r'][0])
- highest.append(trp_r['r'][-1])
-- lowest_aw.append(self.aw[trp_r['w'][0]])
-- highest_aw.append(self.aw[trp_r['w'][-1]])
-+ lowest_aw.append(self.aw[int(trp_r['w'][0])])
-+ highest_aw.append(self.aw[int(trp_r['w'][-1])])
- wm_lowest = weighted_median(np.array(lowest), np.array(lowest_aw))
- wm_highest = weighted_median(
- np.array(highest), np.array(highest_aw))
- triple_members = flatten(triples, unique=False)
- return r[id], wm_lowest, wm_highest, self.aw[id] * len(triples), self.aw[triple_members].sum()
--
-+
- def __get_median_from_screens(self, screens):
- if isinstance(screens, float):
- return screens
-@@ -1884,13 +1884,13 @@ class Headbanging_Median_Rate(object):
- k, tr[k], weighted=(self.aw is not None))
- new_r.append(self.__get_median_from_screens(screens))
- self.r = np.array(new_r)
--
-+
- @_requires('pandas')
- @classmethod
- def by_col(cls, df, e, b, t=None, geom_col='geometry', inplace=False, **kwargs):
- """
- Compute smoothing by columns in a dataframe. The bounding box and point
-- information is computed from the geometry column.
-+ information is computed from the geometry column.
-
- Parameters
- -----------
-@@ -1904,22 +1904,22 @@ class Headbanging_Median_Rate(object):
- variables to be smoothed
- t : Headbanging_Triples instance or list of Headbanging_Triples
- list of headbanging triples instances. If not provided, this
-- is computed from the geometry column of the dataframe.
-+ is computed from the geometry column of the dataframe.
- geom_col: string
- the name of the column in the dataframe containing the
-- geometry information.
-+ geometry information.
- inplace : bool
- a flag denoting whether to output a copy of `df` with the
- relevant smoothed columns appended, or to append the columns
-- directly to `df` itself.
-+ directly to `df` itself.
- **kwargs: optional keyword arguments
- optional keyword options that are passed directly to the
-- smoother.
-+ smoother.
- Returns
- ---------
- a new dataframe containing the smoothed Headbanging Median Rates for the
- event/population pairs. If done inplace, there is no return value and
-- `df` is modified in place.
-+ `df` is modified in place.
- """
- import pandas as pd
- if not inplace:
-@@ -1939,7 +1939,7 @@ class Headbanging_Median_Rate(object):
-
- #Headbanging_Triples doesn't take **kwargs, so filter its arguments
- # (self, data, w, k=5, t=3, angle=135.0, edgecor=False):
--
-+
- w = kwargs.pop('w', None)
- if w is None:
- found = False
-@@ -1951,7 +1951,7 @@ class Headbanging_Median_Rate(object):
- raise Exception('Weights not provided and no weights attached to frame!'
- ' Please provide a weight or attach a weight to the'
- ' dataframe')
--
-+
- k = kwargs.pop('k', 5)
- t = kwargs.pop('t', 3)
- angle = kwargs.pop('angle', 135.0)
-@@ -1959,7 +1959,7 @@ class Headbanging_Median_Rate(object):
-
- hbt = Headbanging_Triples(data, w, k=k, t=t, angle=angle,
- edgecor=edgecor)
--
-+
- res = []
- for ename, bname in zip(e, b):
- r = cls(df[ename], df[bname], hbt, **kwargs).r
---- a/pysal/weights/spatial_lag.py
-+++ b/pysal/weights/spatial_lag.py
-@@ -170,7 +170,7 @@ def lag_categorical(w, y, ties='tryself'
- for neighb, weight in diter(neighbors):
- vals[inty[w.id2i[neighb]]] += weight
- outidx = _resolve_ties(idx,inty,vals,neighbors,ties, w)
-- output[w.id2i[idx]] = keys[outidx]
-+ output[w.id2i[int(idx)]] = keys[int(outidx)]
- return output.reshape(orig_shape)
-
- def _resolve_ties(i,inty,vals,neighbors,method,w):
---- a/pysal/spatial_dynamics/util.py
-+++ b/pysal/spatial_dynamics/util.py
-@@ -12,14 +12,14 @@ def shuffle_matrix(X, ids):
-
- Parameters
- ----------
-- X : array
-+ X : array
- (k, k), array to be permutated.
- ids : array
- range (k, ).
-
- Returns
- -------
-- X : array
-+ X : array
- (k, k) with rows and columns randomly shuffled.
-
- Examples
-@@ -50,7 +50,7 @@ def get_lower(matrix):
- Returns
- -------
- lowvec : array
-- numpy array, the lower half of the distance matrix flattened into
-+ numpy array, the lower half of the distance matrix flattened into
- a vector of length n*(n-1)/2.
-
- Examples
-@@ -75,6 +75,6 @@ def get_lower(matrix):
- if i > j:
- lowerlist.append(matrix[i, j])
- veclen = n * (n - 1) / 2
-- lowvec = np.reshape(lowerlist, (veclen, 1))
-+ lowvec = np.reshape(np.array(lowerlist), (int(veclen), 1))
- return lowvec
-
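For context (not part of the commit or the dropped patch): the numpy patch removed above worked around NumPy's stricter integer handling -- from roughly NumPy 1.12 onward, float array indices and float reshape dimensions raise errors instead of being truncated with a deprecation warning. A minimal sketch of that behaviour, using illustrative names rather than pysal code:

    from __future__ import division      # makes the division below yield a float
    import numpy as np

    n = 10
    veclen = n * (n - 1) / 2              # 45.0 -- a float, not an int
    data = np.arange(45)

    # np.reshape(data, (veclen, 1))       # TypeError on newer NumPy: float dimension
    lowvec = np.reshape(data, (int(veclen), 1))   # explicit cast, as the upstream fix does

    idx = np.float64(3)
    # data[idx]                           # IndexError: only integer indices are valid
    value = data[int(idx)]                # cast the index before using it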
diff --git a/debian/patches/series b/debian/patches/series
deleted file mode 100644
index 630051b..0000000
--- a/debian/patches/series
+++ /dev/null
@@ -1,2 +0,0 @@
-unusual-interpreter.patch
-fix-for-numpy-bump-induced-breakage.patch
diff --git a/debian/patches/unusual-interpreter.patch b/debian/patches/unusual-interpreter.patch
deleted file mode 100644
index 9ae37e5..0000000
--- a/debian/patches/unusual-interpreter.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-Description: Fix unusual interpreter path.
-Author: Bas Couwenberg <sebastic at debian.org>
-Forwarded: https://github.com/pysal/pysal/pull/888
-
---- a/pysal/contrib/network/klincs.py
-+++ b/pysal/contrib/network/klincs.py
-@@ -1,4 +1,4 @@
--#!/usr/env python
-+#!/usr/bin/env python
-
- """
- A library for computing local K function for network-constrained data
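For context (not part of the commit): the shebang fix above matters because env normally lives at /usr/bin/env, so "#!/usr/env python" points at a path that usually does not exist. A small Python 3 check, assuming env is on PATH:

    # Show where "env" actually resolves -- typically /usr/bin/env, which is why
    # "#!/usr/bin/env python" works while "#!/usr/env python" does not.
    import shutil
    print(shutil.which("env"))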
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/pkg-grass/pysal.git