pytorch_isotonic.py
# Copyright 2007-2020 The scikit-learn developers.
# Copyright 2020 Google LLC.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# a. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# b. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# c. Neither the name of the Scikit-learn Developers nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
"""Isotonic optimization routines in PyTorch."""

import torch


def isotonic_l2(y, sol):
  """Solves an isotonic regression problem using PAV.

  Formally, it solves argmin_{v_1 >= ... >= v_n} 0.5 ||v - y||^2.

  Args:
    y: input to isotonic regression, a 1d tensor.
    sol: where to write the solution, a tensor of the same size as y.
  """
  n = y.size(0)
  # Keep everything on the same device as the input instead of forcing .cuda().
  target = torch.arange(n, device=y.device)
  c = torch.ones_like(y)
  sums = torch.zeros_like(y)

  # target describes a list of blocks. At any time, if [i..j] (inclusive) is
  # an active block, then target[i] := j and target[j] := i.
  for i in range(n):
    sol[i] = y[i]
    sums[i] = y[i]

  i = 0
  while i < n:
    k = int(target[i]) + 1
    if k == n:
      break
    if sol[i] > sol[k]:
      i = k
      continue
    # Copy the running block statistics out as Python floats; indexing a
    # tensor returns a view, so accumulating in place on it would silently
    # overwrite entries of sums and c.
    sum_y = float(sums[i])
    sum_c = float(c[i])
    while True:
      # We are within an increasing subsequence.
      prev_y = float(sol[k])
      sum_y += float(sums[k])
      sum_c += float(c[k])
      k = int(target[k]) + 1
      if k == n or prev_y > sol[k]:
        # Non-singleton increasing subsequence is finished,
        # update first entry.
        sol[i] = sum_y / sum_c
        sums[i] = sum_y
        c[i] = sum_c
        target[i] = k - 1
        target[k - 1] = i
        if i > 0:
          # Backtrack if we can. This makes the algorithm
          # single-pass and ensures O(n) complexity.
          i = int(target[i - 1])
        # Otherwise, restart from the same point.
        break

  # Reconstruct the solution.
  i = 0
  while i < n:
    k = int(target[i]) + 1
    sol[i + 1: k] = sol[i]
    i = k
  return sol
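

# A minimal usage sketch (an illustrative addition, not part of the original
# module): run isotonic_l2 on a small CPU tensor and print the projection onto
# the non-increasing cone.
if __name__ == "__main__":
  y = torch.tensor([1.0, 3.0, 2.0, 4.0, 0.0])
  sol = torch.zeros_like(y)
  isotonic_l2(y, sol)
  # PAV pools the first four entries {1, 3, 2, 4} to their mean, so the
  # solution is [2.5, 2.5, 2.5, 2.5, 0.0].
  print("input:   ", y.tolist())
  print("solution:", sol.tolist())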