diff --git a/api/docs/apis-build.openshift.io/v1.BuildConfig.adoc b/api/docs/apis-build.openshift.io/v1.BuildConfig.adoc
index 1d83853c169d..b58d67089fcd 100644
--- a/api/docs/apis-build.openshift.io/v1.BuildConfig.adoc
+++ b/api/docs/apis-build.openshift.io/v1.BuildConfig.adoc
@@ -351,6 +351,7 @@ Populated by the system. Read-only. More info: http://kubernetes.io/docs/user-gu
status:
lastVersion:
diff --git a/api/protobuf-spec/github_com_openshift_api_build_v1.proto b/api/protobuf-spec/github_com_openshift_api_build_v1.proto
index 2f6b987b077a..e4b3b50e961a 100644
--- a/api/protobuf-spec/github_com_openshift_api_build_v1.proto
+++ b/api/protobuf-spec/github_com_openshift_api_build_v1.proto
@@ -780,6 +780,9 @@ message ImageChangeTrigger {
// will be used. Only one ImageChangeTrigger with an empty From reference is allowed in
// a build configuration.
optional k8s.io.api.core.v1.ObjectReference from = 2;
+
+ // paused is true if this trigger is temporarily disabled. Optional.
+ optional bool paused = 3;
}
// ImageLabel represents a label applied to the resulting image.
diff --git a/api/swagger-spec/oapi-v1.json b/api/swagger-spec/oapi-v1.json
index b9b072cd6304..b424e2c27fd7 100644
--- a/api/swagger-spec/oapi-v1.json
+++ b/api/swagger-spec/oapi-v1.json
@@ -24248,6 +24248,10 @@
"from": {
"$ref": "v1.ObjectReference",
"description": "from is a reference to an ImageStreamTag that will trigger a build when updated It is optional. If no From is specified, the From image from the build strategy will be used. Only one ImageChangeTrigger with an empty From reference is allowed in a build configuration."
+ },
+ "paused": {
+ "type": "boolean",
+ "description": "paused is true if this trigger is temporarily disabled. Optional."
}
}
},
diff --git a/api/swagger-spec/openshift-openapi-spec.json b/api/swagger-spec/openshift-openapi-spec.json
index 1ed29e1e4c00..0835a0885750 100644
--- a/api/swagger-spec/openshift-openapi-spec.json
+++ b/api/swagger-spec/openshift-openapi-spec.json
@@ -119637,6 +119637,10 @@
"lastTriggeredImageID": {
"description": "lastTriggeredImageID is used internally by the ImageChangeController to save last used image ID for build",
"type": "string"
+ },
+ "paused": {
+ "description": "paused is true if this trigger is temporarily disabled. Optional.",
+ "type": "boolean"
}
}
},
diff --git a/glide.lock b/glide.lock
index 3a633e2c4200..77cc122e6e07 100644
--- a/glide.lock
+++ b/glide.lock
@@ -1,5 +1,5 @@
-hash: fd0d3ccfd367c1a3eaf92bffa4738270b4478428cb8262e6cf84c26ab6269dbc
-updated: 2018-06-19T15:18:05.726174-04:00
+hash: 8b2712c6388649ccf0a6efb719c4cb3308e5632164816ccca2c65a03abb02a72
+updated: 2018-06-25T17:30:55.652399152+02:00
imports:
- name: bitbucket.org/ww/goautoneg
version: 75cd24fc2f2c2a2088577d12123ddee5f54e0675
@@ -480,10 +480,20 @@ imports:
version: 50b27dea7ebbfb052dfaf91681afc6fde28d8796
subpackages:
- encoding/dot
- - internal
+ - formats/dot
+ - formats/dot/ast
+ - formats/dot/internal/astx
+ - formats/dot/internal/errors
+ - formats/dot/internal/lexer
+ - formats/dot/internal/parser
+ - formats/dot/internal/token
+ - internal/linear
+ - internal/ordered
+ - internal/set
- path
- simple
- topo
+ - traverse
- name: github.com/gonum/internal
version: e57e4534cf9b3b00ef6c0175f59d8d2d34f60914
subpackages:
@@ -818,7 +828,7 @@ imports:
- go-selinux
- go-selinux/label
- name: github.com/openshift/api
- version: 0ce1df2db7debb15eddb25f3ae76df4180777221
+ version: 25fb8bd62e76dd664f4cebfdd7c4b56e4fcf652b
subpackages:
- apps/v1
- authorization/v1
@@ -946,7 +956,7 @@ imports:
- user/informers/externalversions/user/v1
- user/listers/user/v1
- name: github.com/openshift/imagebuilder
- version: 38229f93eea5ad4527ec60be7dd81267b80bb4ff
+ version: ad48a308429bc7e1530d26d70451e78d44233bd4
subpackages:
- dockerclient
- imageprogress
@@ -1814,7 +1824,7 @@ imports:
- pkg/util/proto
- pkg/util/proto/validation
- name: k8s.io/kubernetes
- version: 6d1dd08538863a8d01b84012b8fdff5b2e6f2240
+ version: 0d23de25150858a435f213d63e03564ad756b05d
repo: https://github.com/openshift/kubernetes.git
subpackages:
- cmd/controller-manager/app
diff --git a/pkg/build/apis/build/types.go b/pkg/build/apis/build/types.go
index 9eca34dcd84c..3a92c2b262d1 100644
--- a/pkg/build/apis/build/types.go
+++ b/pkg/build/apis/build/types.go
@@ -1125,6 +1125,9 @@ type ImageChangeTrigger struct {
// will be used. Only one ImageChangeTrigger with an empty From reference is allowed in
// a build configuration.
From *kapi.ObjectReference
+
+ // Paused is true if this trigger is temporarily disabled. Optional.
+ Paused bool
}
// BuildTriggerPolicy describes a policy for a single trigger that results in a new Build.
diff --git a/pkg/build/apis/build/v1/zz_generated.conversion.go b/pkg/build/apis/build/v1/zz_generated.conversion.go
index 03c92cf4e683..001fbea78919 100644
--- a/pkg/build/apis/build/v1/zz_generated.conversion.go
+++ b/pkg/build/apis/build/v1/zz_generated.conversion.go
@@ -1875,6 +1875,7 @@ func autoConvert_v1_ImageChangeTrigger_To_build_ImageChangeTrigger(in *v1.ImageC
} else {
out.From = nil
}
+ out.Paused = in.Paused
return nil
}
@@ -1894,6 +1895,7 @@ func autoConvert_build_ImageChangeTrigger_To_v1_ImageChangeTrigger(in *build.Ima
} else {
out.From = nil
}
+ out.Paused = in.Paused
return nil
}
diff --git a/pkg/image/trigger/buildconfigs/buildconfigs.go b/pkg/image/trigger/buildconfigs/buildconfigs.go
index 05c99b0d022f..7276544eac10 100644
--- a/pkg/image/trigger/buildconfigs/buildconfigs.go
+++ b/pkg/image/trigger/buildconfigs/buildconfigs.go
@@ -143,6 +143,10 @@ func (r *buildConfigReactor) ImageChanged(obj runtime.Object, tagRetriever trigg
if p == nil || (p.From != nil && p.From.Kind != "ImageStreamTag") {
continue
}
+ if p.Paused {
+ glog.V(5).Infof("Skipping paused build on bc: %s/%s for trigger: %s", bc.Namespace, bc.Name, t)
+ continue
+ }
var from *kapi.ObjectReference
if p.From != nil {
from = p.From
diff --git a/pkg/image/trigger/buildconfigs/buildconfigs_test.go b/pkg/image/trigger/buildconfigs/buildconfigs_test.go
index dc01ce240b71..a3432cce628f 100644
--- a/pkg/image/trigger/buildconfigs/buildconfigs_test.go
+++ b/pkg/image/trigger/buildconfigs/buildconfigs_test.go
@@ -242,6 +242,40 @@ func TestBuildConfigReactor(t *testing.T) {
map[string]string{"stream-1:1": "image-lookup-1"},
),
},
+
+ {
+ // won't fire because it is paused
+ tags: []fakeTagResponse{{Namespace: "other", Name: "stream-1:1", Ref: "image-lookup-1", RV: 2}},
+ obj: testBuildConfig([]buildapi.ImageChangeTrigger{
+ {
+ From: &kapi.ObjectReference{Name: "stream-1:1", Namespace: "other", Kind: "ImageStreamTag"},
+ Paused: true,
+ },
+ }),
+ },
+
+ {
+ // will fire only for unpaused if multiple triggers are resolved
+ tags: []fakeTagResponse{
+ {Namespace: "other", Name: "stream-1:1", Ref: "image-lookup-1", RV: 2},
+ {Namespace: "other", Name: "stream-2:1", Ref: "image-lookup-2", RV: 2},
+ },
+ obj: testBuildConfig([]buildapi.ImageChangeTrigger{
+ {
+ From: &kapi.ObjectReference{Name: "stream-1:1", Namespace: "other", Kind: "ImageStreamTag"},
+ Paused: true,
+ },
+ {
+ From: &kapi.ObjectReference{Name: "stream-2:1", Namespace: "other", Kind: "ImageStreamTag"},
+ },
+ }),
+ response: &buildapi.Build{},
+ expected: testBuildRequest(
+ &kapi.ObjectReference{Name: "stream-2:1", Namespace: "other", Kind: "ImageStreamTag"},
+ "image-lookup-2",
+ map[string]string{"stream-2:1": "image-lookup-2"},
+ ),
+ },
}
for i, test := range testCases {
diff --git a/pkg/openapi/zz_generated.openapi.go b/pkg/openapi/zz_generated.openapi.go
index 42641df82a83..75bf80925017 100644
--- a/pkg/openapi/zz_generated.openapi.go
+++ b/pkg/openapi/zz_generated.openapi.go
@@ -5775,6 +5775,13 @@ func schema_openshift_api_build_v1_ImageChangeTrigger(ref common.ReferenceCallba
Ref: ref("k8s.io/api/core/v1.ObjectReference"),
},
},
+ "paused": {
+ SchemaProps: spec.SchemaProps{
+ Description: "paused is true if this trigger is temporarily disabled. Optional.",
+ Type: []string{"boolean"},
+ Format: "",
+ },
+ },
},
},
},
diff --git a/vendor/github.com/gonum/graph/community/bisect.go b/vendor/github.com/gonum/graph/community/bisect.go
new file mode 100644
index 000000000000..7cd1dbd7ed81
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/bisect.go
@@ -0,0 +1,248 @@
+// Copyright ©2016 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "math/rand"
+
+ "github.com/gonum/graph"
+)
+
+// Interval is an interval of resolutions with a common score.
+type Interval struct {
+ // Low and High delimit the interval
+ // such that the interval is [low, high).
+ Low, High float64
+
+ // Score is the score of the interval.
+ Score float64
+
+ // Reduced is the best scoring
+ // community membership found for the
+ // interval.
+ Reduced
+}
+
+// Reduced is a graph reduction.
+type Reduced interface {
+ // Communities returns the community
+ // structure of the reduction.
+ Communities() [][]graph.Node
+}
+
+// Size is a score function that is the reciprocal of the number of communities.
+func Size(g ReducedGraph) float64 { return 1 / float64(len(g.Structure())) }
+
+// Weight is a score function that is the sum of community weights. The concrete
+// type of g must be a pointer to a ReducedUndirected or a ReducedDirected, otherwise
+// Weight will panic.
+func Weight(g ReducedGraph) float64 {
+ var w float64
+ switch g := g.(type) {
+ case *ReducedUndirected:
+ for _, n := range g.nodes {
+ w += n.weight
+ }
+ case *ReducedDirected:
+ for _, n := range g.nodes {
+ w += n.weight
+ }
+ default:
+ panic(fmt.Sprintf("community: invalid graph type: %T", g))
+ }
+ return w
+}
+
+// ModularScore returns a modularized scoring function for Profile based on the
+// graph g and the given score function. The effort parameter determines how
+// many attempts will be made to get an improved score for any given resolution.
+func ModularScore(g graph.Graph, score func(ReducedGraph) float64, effort int, src *rand.Rand) func(float64) (float64, Reduced) {
+ return func(resolution float64) (float64, Reduced) {
+ max := math.Inf(-1)
+ var best Reduced
+ for i := 0; i < effort; i++ {
+ r := Modularize(g, resolution, src)
+ s := score(r)
+ if s > max {
+ max = s
+ best = r
+ }
+ }
+ return max, best
+ }
+}
+
+// SizeMultiplex is a score function that is the reciprocal of the number of communities.
+func SizeMultiplex(g ReducedMultiplex) float64 { return 1 / float64(len(g.Structure())) }
+
+// WeightMultiplex is a score function that is the sum of community weights. The concrete
+// type of g must be pointer to a ReducedUndirectedMultiplex or a ReducedDirectedMultiplex,
+// otherwise WeightMultiplex will panic.
+func WeightMultiplex(g ReducedMultiplex) float64 {
+ var w float64
+ switch g := g.(type) {
+ case *ReducedUndirectedMultiplex:
+ for _, n := range g.nodes {
+ for _, lw := range n.weights {
+ w += lw
+ }
+ }
+ case *ReducedDirectedMultiplex:
+ for _, n := range g.nodes {
+ for _, lw := range n.weights {
+ w += lw
+ }
+ }
+ default:
+ panic(fmt.Sprintf("community: invalid graph type: %T", g))
+ }
+ return w
+}
+
+// ModularMultiplexScore returns a modularized scoring function for Profile based
+// on the graph g and the given score function. The effort parameter determines how
+// many attempts will be made to get an improved score for any given resolution.
+func ModularMultiplexScore(g Multiplex, weights []float64, all bool, score func(ReducedMultiplex) float64, effort int, src *rand.Rand) func(float64) (float64, Reduced) {
+ return func(resolution float64) (float64, Reduced) {
+ max := math.Inf(-1)
+ var best Reduced
+ for i := 0; i < effort; i++ {
+ r := ModularizeMultiplex(g, weights, []float64{resolution}, all, src)
+ s := score(r)
+ if s > max {
+ max = s
+ best = r
+ }
+ }
+ return max, best
+ }
+}
+
+// Profile returns an approximate profile of score values in the resolution domain [low,high)
+// at the given granularity. The score is calculated by bisecting calls to fn. If log is true,
+// log space bisection is used, otherwise bisection is linear. The function fn should be
+// monotonically decreasing in at least 1/grain evaluations. Profile will attempt to detect
+// non-monotonicity during the bisection.
+//
+// Since exact modularity optimization is known to be NP-hard and Profile calls modularization
+// routines repeatedly, it is unlikely to return the exact resolution profile.
+func Profile(fn func(float64) (float64, Reduced), log bool, grain, low, high float64) (profile []Interval, err error) {
+ if low >= high {
+ return nil, errors.New("community: zero or negative width domain")
+ }
+
+ defer func() {
+ r := recover()
+ e, ok := r.(nonDecreasing)
+ if ok {
+ err = e
+ return
+ }
+ if r != nil {
+ panic(r)
+ }
+ }()
+ left, comm := fn(low)
+ right, _ := fn(high)
+ for i := 1; i < int(1/grain); i++ {
+ rt, _ := fn(high)
+ right = math.Max(right, rt)
+ }
+ profile = bisect(fn, log, grain, low, left, high, right, comm)
+
+ // We may have missed some non-monotonicity,
+ // so merge low score discordant domains into
+ // their lower resolution neighbours.
+ return fixUp(profile), nil
+}
+
+type nonDecreasing int
+
+func (n nonDecreasing) Error() string {
+ return fmt.Sprintf("community: profile does not reliably monotonically decrease: tried %d times", n)
+}
+
+func bisect(fn func(float64) (float64, Reduced), log bool, grain, low, scoreLow, high, scoreHigh float64, comm Reduced) []Interval {
+ if low >= high {
+ panic("community: zero or negative width domain")
+ }
+ if math.IsNaN(scoreLow) || math.IsNaN(scoreHigh) {
+ return nil
+ }
+
+ // Heuristically determine a reasonable number
+ // of times to try to get a higher value.
+ maxIter := int(1 / grain)
+
+ lowComm := comm
+ for n := 0; scoreLow < scoreHigh; n++ {
+ if n > maxIter {
+ panic(nonDecreasing(n))
+ }
+ scoreLow, lowComm = fn(low)
+ }
+
+ if scoreLow == scoreHigh || tooSmall(low, high, grain, log) {
+ return []Interval{{Low: low, High: high, Score: scoreLow, Reduced: lowComm}}
+ }
+
+ var mid float64
+ if log {
+ mid = math.Sqrt(low * high)
+ } else {
+ mid = (low + high) / 2
+ }
+
+ scoreMid := math.Inf(-1)
+ var midComm Reduced
+ for n := 0; scoreMid < scoreHigh; n++ {
+ if n > maxIter {
+ panic(nonDecreasing(n))
+ }
+ scoreMid, midComm = fn(mid)
+ }
+
+ lower := bisect(fn, log, grain, low, scoreLow, mid, scoreMid, lowComm)
+ higher := bisect(fn, log, grain, mid, scoreMid, high, scoreHigh, midComm)
+ for n := 0; lower[len(lower)-1].Score < higher[0].Score; n++ {
+ if n > maxIter {
+ panic(nonDecreasing(n))
+ }
+ lower[len(lower)-1].Score, lower[len(lower)-1].Reduced = fn(low)
+ }
+
+ if lower[len(lower)-1].Score == higher[0].Score {
+ higher[0].Low = lower[len(lower)-1].Low
+ lower = lower[:len(lower)-1]
+ if len(lower) == 0 {
+ return higher
+ }
+ }
+ return append(lower, higher...)
+}
+
+// fixUp non-monotonically decreasing interval scores.
+func fixUp(profile []Interval) []Interval {
+ max := profile[len(profile)-1].Score
+ for i := len(profile) - 2; i >= 0; i-- {
+ if profile[i].Score > max {
+ max = profile[i].Score
+ continue
+ }
+ profile[i+1].Low = profile[i].Low
+ profile = append(profile[:i], profile[i+1:]...)
+ }
+ return profile
+}
+
+func tooSmall(low, high, grain float64, log bool) bool {
+ if log {
+ return math.Log(high/low) < grain
+ }
+ return high-low < grain
+}
diff --git a/vendor/github.com/gonum/graph/community/bisect_test.go b/vendor/github.com/gonum/graph/community/bisect_test.go
new file mode 100644
index 000000000000..6f12a62acb24
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/bisect_test.go
@@ -0,0 +1,269 @@
+// Copyright ©2016 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "fmt"
+ "log"
+ "sort"
+ "testing"
+
+ "github.com/gonum/graph/internal/ordered"
+
+ "github.com/gonum/graph/simple"
+)
+
+func ExampleProfile_simple() {
+ // Create dumbell graph:
+ //
+ // 0 4
+ // |\ /|
+ // | 2 - 3 |
+ // |/ \|
+ // 1 5
+ //
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range smallDumbell {
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ // Get the profile of internal node weight for resolutions
+ // between 0.1 and 10 using logarithmic bisection.
+ p, err := Profile(ModularScore(g, Weight, 10, nil), true, 1e-3, 0.1, 10)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Print out each step with communities ordered.
+ for _, d := range p {
+ comm := d.Communities()
+ for _, c := range comm {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(comm))
+ fmt.Printf("Low:%.2v High:%.2v Score:%v Communities:%v Q=%.3v\n",
+ d.Low, d.High, d.Score, comm, Q(g, comm, d.Low))
+ }
+
+ // Output:
+ // Low:0.1 High:0.29 Score:14 Communities:[[0 1 2 3 4 5]] Q=0.9
+ // Low:0.29 High:2.3 Score:12 Communities:[[0 1 2] [3 4 5]] Q=0.714
+ // Low:2.3 High:3.5 Score:4 Communities:[[0 1] [2] [3] [4 5]] Q=-0.31
+ // Low:3.5 High:10 Score:0 Communities:[[0] [1] [2] [3] [4] [5]] Q=-0.607
+}
+
+var friends, enemies *simple.UndirectedGraph
+
+func init() {
+ friends = simple.NewUndirectedGraph(0, 0)
+ for u, e := range middleEast.friends {
+ // Ensure unconnected nodes are included.
+ if !friends.Has(simple.Node(u)) {
+ friends.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ friends.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+ enemies = simple.NewUndirectedGraph(0, 0)
+ for u, e := range middleEast.enemies {
+ // Ensure unconnected nodes are included.
+ if !enemies.Has(simple.Node(u)) {
+ enemies.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ enemies.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: -1})
+ }
+ }
+}
+
+func ExampleProfile_multiplex() {
+ // The undirected graphs, friends and enemies, are the political relationships
+ // in the Middle East as described in the Slate article:
+ // http://www.slate.com/blogs/the_world_/2014/07/17/the_middle_east_friendship_chart.html
+ g, err := NewUndirectedLayers(friends, enemies)
+ if err != nil {
+ log.Fatal(err)
+ }
+ weights := []float64{1, -1}
+
+ // Get the profile of internal node weight for resolutions
+ // between 0.1 and 10 using logarithmic bisection.
+ p, err := Profile(ModularMultiplexScore(g, weights, true, WeightMultiplex, 10, nil), true, 1e-3, 0.1, 10)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Print out each step with communities ordered.
+ for _, d := range p {
+ comm := d.Communities()
+ for _, c := range comm {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(comm))
+ fmt.Printf("Low:%.2v High:%.2v Score:%v Communities:%v Q=%.3v\n",
+ d.Low, d.High, d.Score, comm, QMultiplex(g, comm, weights, []float64{d.Low}))
+ }
+
+ // Output:
+ // Low:0.1 High:0.72 Score:26 Communities:[[0] [1 7 9 12] [2 8 11] [3 4 5 10] [6]] Q=[24.7 1.97]
+ // Low:0.72 High:1.1 Score:24 Communities:[[0 6] [1 7 9 12] [2 8 11] [3 4 5 10]] Q=[16.9 14.1]
+ // Low:1.1 High:1.2 Score:18 Communities:[[0 2 6 11] [1 7 9 12] [3 4 5 8 10]] Q=[9.16 25.1]
+ // Low:1.2 High:1.6 Score:10 Communities:[[0 3 4 5 6 10] [1 7 9 12] [2 8 11]] Q=[11.4 24.1]
+ // Low:1.6 High:1.6 Score:8 Communities:[[0 1 6 7 9 12] [2 8 11] [3 4 5 10]] Q=[5.56 39.8]
+ // Low:1.6 High:1.8 Score:2 Communities:[[0 2 3 4 5 6 10] [1 7 8 9 11 12]] Q=[-1.82 48.6]
+ // Low:1.8 High:2.3 Score:-6 Communities:[[0 2 3 4 5 6 8 10 11] [1 7 9 12]] Q=[-5 57.5]
+ // Low:2.3 High:2.4 Score:-10 Communities:[[0 1 2 6 7 8 9 11 12] [3 4 5 10]] Q=[-11.2 79]
+ // Low:2.4 High:4.3 Score:-52 Communities:[[0 1 2 3 4 5 6 7 8 9 10 11 12]] Q=[-46.1 117]
+ // Low:4.3 High:10 Score:-54 Communities:[[0 1 2 3 4 6 7 8 9 10 11 12] [5]] Q=[-82 254]
+}
+
+func TestProfileUndirected(t *testing.T) {
+ for _, test := range communityUndirectedQTests {
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ fn := ModularScore(g, Weight, 10, nil)
+ p, err := Profile(fn, true, 1e-3, 0.1, 10)
+ if err != nil {
+ t.Errorf("%s: unexpected error: %v", test.name, err)
+ }
+
+ const tries = 1000
+ for i, d := range p {
+ var score float64
+ for i := 0; i < tries; i++ {
+ score, _ = fn(d.Low)
+ if score >= d.Score {
+ break
+ }
+ }
+ if score < d.Score {
+ t.Errorf("%s: failed to recover low end score: got: %v want: %v", test.name, score, d.Score)
+ }
+ if i != 0 && d.Score >= p[i-1].Score {
+ t.Errorf("%s: not monotonically decreasing: %v -> %v", test.name, p[i-1], d)
+ }
+ }
+ }
+}
+
+func TestProfileDirected(t *testing.T) {
+ for _, test := range communityDirectedQTests {
+ g := simple.NewDirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ fn := ModularScore(g, Weight, 10, nil)
+ p, err := Profile(fn, true, 1e-3, 0.1, 10)
+ if err != nil {
+ t.Errorf("%s: unexpected error: %v", test.name, err)
+ }
+
+ const tries = 1000
+ for i, d := range p {
+ var score float64
+ for i := 0; i < tries; i++ {
+ score, _ = fn(d.Low)
+ if score >= d.Score {
+ break
+ }
+ }
+ if score < d.Score {
+ t.Errorf("%s: failed to recover low end score: got: %v want: %v", test.name, score, d.Score)
+ }
+ if i != 0 && d.Score >= p[i-1].Score {
+ t.Errorf("%s: not monotonically decreasing: %v -> %v", test.name, p[i-1], d)
+ }
+ }
+ }
+}
+
+func TestProfileUndirectedMultiplex(t *testing.T) {
+ for _, test := range communityUndirectedMultiplexQTests {
+ g, weights, err := undirectedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ const all = true
+
+ fn := ModularMultiplexScore(g, weights, all, WeightMultiplex, 10, nil)
+ p, err := Profile(fn, true, 1e-3, 0.1, 10)
+ if err != nil {
+ t.Errorf("%s: unexpected error: %v", test.name, err)
+ }
+
+ const tries = 1000
+ for i, d := range p {
+ var score float64
+ for i := 0; i < tries; i++ {
+ score, _ = fn(d.Low)
+ if score >= d.Score {
+ break
+ }
+ }
+ if score < d.Score {
+ t.Errorf("%s: failed to recover low end score: got: %v want: %v", test.name, score, d.Score)
+ }
+ if i != 0 && d.Score >= p[i-1].Score {
+ t.Errorf("%s: not monotonically decreasing: %v -> %v", test.name, p[i-1], d)
+ }
+ }
+ }
+}
+
+func TestProfileDirectedMultiplex(t *testing.T) {
+ for _, test := range communityDirectedMultiplexQTests {
+ g, weights, err := directedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ const all = true
+
+ fn := ModularMultiplexScore(g, weights, all, WeightMultiplex, 10, nil)
+ p, err := Profile(fn, true, 1e-3, 0.1, 10)
+ if err != nil {
+ t.Errorf("%s: unexpected error: %v", test.name, err)
+ }
+
+ const tries = 1000
+ for i, d := range p {
+ var score float64
+ for i := 0; i < tries; i++ {
+ score, _ = fn(d.Low)
+ if score >= d.Score {
+ break
+ }
+ }
+ if score < d.Score {
+ t.Errorf("%s: failed to recover low end score: got: %v want: %v", test.name, score, d.Score)
+ }
+ if i != 0 && d.Score >= p[i-1].Score {
+ t.Errorf("%s: not monotonically decreasing: %v -> %v", test.name, p[i-1], d)
+ }
+ }
+ }
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain.tex b/vendor/github.com/gonum/graph/community/louvain.tex
new file mode 100644
index 000000000000..046c02baf0ef
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain.tex
@@ -0,0 +1,466 @@
+% Copyright ©2015 The gonum Authors. All rights reserved.
+% Use of this source code is governed by a BSD-style
+% license that can be found in the LICENSE file.
+
+\documentclass{article}
+
+\usepackage{amsmath,amsfonts}
+\usepackage[margin=4cm]{geometry}
+
+\title{Louvain algorithm for undirected and directed graphs}
+\author{The {\tt gonum} Authors}
+
+\begin{document}
+
+\maketitle
+
+The algorithm attempts to find communities (highly connected sub-graphs),
+and it does this by minimising the modularity function
+\begin{equation}
+ Q(c) = \frac{1}{2m}\sum_i\sum_j\left[ A_{ij} - \gamma \frac{k_ik_j}{2m} \right] \delta_{ij}(c),
+\end{equation}
+where $c$ is a partition of nodes into subsets or communities,
+$A_{ij}$ is the edge weight between nodes $i$ and $j$,
+$\gamma$ is a tuning parameter,
+\begin{equation}
+m = \frac{1}{2}\sum_i\sum_jA_{ij},
+\end{equation}
+\begin{equation}
+k_i = \sum_j{A_{ij}},
+\end{equation}
+and
+\begin{equation}
+ \delta_{ij}(c) = \left \{ \begin{array}{ll}
+ 1 & \text{if} \quad c(i) = c(j) \\
+ 0 & \text{otherwise} \end{array} \right ..
+\end{equation}
+Here $c(i)$ denotes the community to which node $i$ belongs
+in the partitioning $c$.
+
+The algorithm finds a hierarchical community structure by iterating
+between two phases:
+\begin{enumerate}
+ \item Find a set of communities that minimise $Q$.
+ \item Construct a new graph, whose nodes are the communities
+ found in the preceding phase one step.
+\end{enumerate}
+Each iteration of these two phases is called a `pass'.
+In this way, the algorithm obtains a nested community structure,
+where at each level $Q$ is minimised for the relevant graph.
+We consider this process in more detail, in particular looking
+at phase one first in the first pass, when each node is a single
+node, and then how this generalises to later passes when each node
+is a community.
+
+\section{Undirected Graphs}
+
+\subsection{Initial Pass}
+\label{sec:initialPass}
+
+The initial pass is simple as the initial pass uses the original graph,
+and in all following passes graphs constructed in the previous pass's
+phase two are used.
+Here we will consider this initial simple formulation for phase one, and
+in Section~\ref{sec:laterPasses} we consider how this generalises for
+passes two and onwards.
+Phase one works by initially allocating each node to a separate community,
+and then iterating through each node $a$ and checking if moving it into
+a different community $\beta$ will reduce $Q$.
+If there are possible moves that will reduce $Q$, $a$ is moved into the
+community which will generate the largest reduction in $Q$.
+This process is continued until there are no moves left to reduce $Q$
+further, meaning a local minimum for $Q$ has been achieved.
+Then the algorithm moves to phase two (constructing a new graph where
+each node in the new graph is a community in the old graph).
+
+Note that we assume the original graph to be simple and undirected.
+First, we introduce some notation that will be useful:
+Let $c(i)$ denote the community to which node $i$ belongs,
+and let $\alpha$ be the community that the node $a$ mentioned above
+belongs to, i.e., $\alpha = c_a$.
+Then we define
+\newcommand{\Stot}[1]{\Sigma_{\text{tot}}^{#1}}
+\begin{equation}
+ \Stot{\alpha} = \sum_{i \in \alpha}\sum_{j}A_{ij} = \sum_{i \in \alpha}k_i,
+\end{equation}
+\newcommand{\kin}[2]{k_{#1}^{#2}}
+\begin{equation}
+ \kin{i}{\alpha} = \sum_{j \in \alpha}A_{ij},
+\end{equation}
+and
+\newcommand{\Sin}[1]{\Sigma_{\text{in}}^{#1}}
+\begin{equation}
+ \Sin{\alpha} = \sum_{i \in \alpha}\sum_{j \in \alpha}A_{ij} = \sum_{i \in \alpha}\kin{i}{\alpha}.
+\end{equation}
+
+We are interested in how $Q$ will change if we move a node $a$ from its
+current community $\alpha$, to a new community $\beta$.
+This will have two effects, it will remove the terms from $Q$
+related to $a$ in $\alpha$, which we will call $Q^-$ and it will add terms
+related to $a$ in $\beta$, which we will call $Q^+$.
+The total change in $Q$ caused by the movement of $a$ from $\alpha$ to $\beta$ is
+\begin{equation}
+ \Delta Q = Q^{+} - Q^{-},
+\end{equation}
+where
+\begin{align*}
+Q^- &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right)
++ 2\sum_{i \in \alpha, \, i \neq a} \left( A_{ia} - \gamma \frac{k_ik_a}{2m} \right) \right] \\
+ &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right)
++ 2 \left( \kin{a}{\alpha} -A_{aa}\right) - \gamma \frac{2k_a}{2m}\sum_{i \in \alpha, \, i \neq a} k_i \right] \\
+ &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right)
++ 2 \left( \kin{a}{\alpha} -A_{aa}\right) - \gamma \frac{2k_a}{2m}\left( \Stot{\alpha} - k_a \right) \right], \\
+\end{align*}
+and
+\begin{align*}
+Q^+ &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right)
++ 2\sum_{i \in \beta} \left( A_{ia} - \gamma \frac{k_ik_a}{2m} \right) \right] \\
+ &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right)
++ 2\kin{a}{\beta} - \gamma \frac{2k_a}{2m}\sum_{i \in \beta} k_i \right] \\
+ &= \frac{1}{2m}\left[ \left( A_{aa} - \gamma \frac{k_a^2}{2m} \right)
++ 2\kin{a}{\beta} - \gamma \frac{2k_a\Stot{\beta}}{2m} \right]. \\
+\end{align*}
+The first term in both these expressions ($Q^-$ and $Q^+$) is the same, and so cancels:
+\begin{equation}
+\Delta Q = \frac{1}{2m}\left[ \left( 2\kin{a}{\beta} - \gamma \frac{2k_a\Stot{\beta}}{2m} \right)
+ - \left( 2 \left( \kin{a}{\alpha} -A_{aa}\right) - \gamma \frac{2k_a}{2m}\left( \Stot{\alpha} - k_a \right) \right) \right].
+\end{equation}
+
+\subsection{Later Passes}
+\label{sec:laterPasses}
+
+In phase two a `meta-graph' is constructed where nodes correspond to
+the communities found in the preceding phase one step, and edge weight
+between two such communities (nodes, in the meta-graph)
+$\alpha$ and $\beta$ are defined to be
+\begin{equation}
+ A_{\alpha \beta}^* = \sum_{i \in \alpha}\sum_{j \in \beta}A_{ij}.
+ \label{eqn:Aij*}
+\end{equation}
+Note that $i$ and $j$ refer to nodes in the original graph, not nodes
+in the previous graph, and so this holds for any meta-graph, not just the first.
+Also note that this definition of $A^*_{\alpha \beta}$ allows for
+$A^*_{\alpha \alpha}$ to be non-zero as
+\begin{equation}
+A_{\alpha \alpha}^* = \sum_{i \in \alpha}\sum_{j \in \alpha}A_{ij} = \Sin{\alpha}.
+\end{equation}
+
+In this newly constructed graph, $\alpha$ and $\beta$ are nodes, but
+also refer to communities (sets of nodes) in the original graph, and I
+use these two interpretations interchangeably.
+This should be the only ambiguous bit of notation in this document, I hope.
+
+The results of Section~\ref{sec:initialPass} generalise to these meta-graphs,
+and the generalised results mirror those of Section~\ref{sec:initialPass} closely
+-- I distinguish the new results from those of Section~\ref{sec:initialPass} by a
+superscript $*$.
+I use $i$ and $j$ to denote nodes of the original graph as in Section~\ref{sec:initialPass},
+and use $z$ and $w$ to denote nodes of the meta-graph (communities of the original).
+I use analogous notation to Section~\ref{sec:initialPass}, $c^*(z)$, to
+denote the community to which node $z$ of the meta-graph belongs,
+and let $\mathfrak{a}$ be the community that the node $\alpha$ belongs to
+($c^*(\alpha) = \mathfrak{a}$), i.e.
+\begin{equation}
+ \mathfrak{a} = \{z | c^*(z) = c^*(\alpha) \}.
+\end{equation}
+
+Given this notation, we can observe that
+\begin{equation}
+m^* = \frac{1}{2}\sum_{z}\sum_{w}{A_{zw}^*} = \frac{1}{2}\sum_{z}\sum_{w}{\sum_{i \in z}\sum_{j \in w}A_{ij}} = \frac{1}{2}\sum_i\sum_jA_{ij} = m,
+\end{equation}
+\begin{equation}
+k_{z}^* = \sum_{w}{A_{zw}^*} = \sum_{w}{\sum_{i \in z}\sum_{j \in w}A_{ij}} = \sum_{i \in z}\sum_{j}A_{ij} = \Stot{z},
+\end{equation}
+\begin{equation}
+ \Stot{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w}A_{zw}^* = \sum_{z \in \mathfrak{a}}k_z^* = \sum_{z \in \mathfrak{a}}\Stot{z},
+\end{equation}
+\begin{equation}
+ \kin{z}{\mathfrak{a} *} = \sum_{w \in \mathfrak{a}}{A_{zw}^*} = \sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}A_{ij}},
+\end{equation}
+and
+\begin{equation}
+\Sin{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w \in \mathfrak{a}}A_{zw}^* = \sum_{z \in \mathfrak{a}}\kin{z}{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}A_{ij}}.
+ %\label{eqn:Sin}
+\end{equation}
+
+If we let $\mathfrak{b}$ denote the community to which we are considering moving $\alpha$,
+then the expression for $\Delta Q$ from Section~\ref{sec:initialPass} trivially generalises to
+\begin{equation}
+\Delta Q = \frac{1}{2m}\left[ \left( 2 \kin{\alpha}{\mathfrak{b} *} - \gamma \frac{2k_{\alpha}^*\Stot{\mathfrak{b} *}}{2m} \right)
+ - \left( 2\left( \kin{\alpha}{\mathfrak{a} *} - A_{\alpha \alpha}^* \right) - \gamma \frac{2k_{\alpha}^*}{2m} \left( \Stot{\mathfrak{a} *} - k_{\alpha}^* \right ) \right) \right] \\
+\end{equation}
+
+\section{Directed Graphs}
+\label{sec:directedGraphs}
+
+It is of interest to consider how this generalises to directed graphs.
+If we are to treat incoming and outgoing nodes equally, there are several
+thoughts on how to extend the algorithm to directed graphs, of which we
+will explore three:
+\begin{itemize}
+ \item Construct an undirected graph first, and then use the undirected case.
+ \item Generalise the expressions from the undirected case to the directed case,
+ we will consider two different suggestions for such generalisations.
+\end{itemize}
+We will show that one of the two `generalisation of expressions' approaches is
+equivalent to constructing an undirected graph, and the other is not.
+
+\subsection{Construction of an undirected graph}
+A simple approach to generalising to directed graphs is to construct
+an undirected graph with edge weights
+\begin{equation}
+A_{ij} = B_{ij} + B_{ji},
+\label{eqn:undirectedAB}
+\end{equation}
+and simply use the undirected algorithm.
+Another suggestion is to average the directed edges to make
+an undirected graph, i.e. to use a directed graph with edge weights
+\begin{equation}
+A_{ij} = \frac{B_{ij} + B_{ji}}{2}.
+\end{equation}
+This raises an important question: does scaling all edge weights across
+the entire graph by a constant affect the results of the algorithm?
+Hopefully not, but worth checking.
+We can follow this through the results for the undirected graph by
+substituting $A_{ij}^{(1)} = pA_{ij}$, $p \in \mathbb{R}$, and
+distinguishing the new expressions by a superscript ${(1)}$. These
+new expressions are:
+\begin{equation}
+m^{(1)} = \frac{1}{2}\sum_i\sum_jpA_{ij} = p\frac{1}{2}\sum_i\sum_j A_{ij} = pm ,
+\end{equation}
+\begin{equation}
+k_i^{(1)} = \sum_j{pA_{ij}} = p\sum_j{A_{ij}} = pk_i,
+\end{equation}
+and so
+\begin{align*}
+ Q^{(1)}(c) &= \frac{1}{2pm}\sum_i\sum_j\left[ pA_{ij} - \gamma \frac{pk_ipk_j}{2pm} \right] \delta_{ij}(c) \\
+ &= \frac{1}{2m}\sum_i\sum_j\left[ A_{ij} - \gamma \frac{k_ik_j}{2m} \right] \delta_{ij}(c) \\
+ &= Q(c)
+\end{align*}
+Note that as we have shown $Q^{(1)} = Q$ there is no need to go into the remainder of the terms
+involved in the algorithm, as they all derive from $Q$.
+
+\subsection{First generalisation of expressions approach}
+
+One suggested extension to directed graphs is to modify the expressions
+involved by adding the `from' case and the `to' case for each term.
+If we let $B_{ij}$ be the edge weight between nodes $i$ and $j$ in
+the directed graph, and distinguishing these extended expressions by
+a superscript $(2)$, the extended expressions become:
+\begin{equation}
+m^{(2)} = \frac{1}{2}\left ( \sum_i\sum_jB_{ij} + \sum_i\sum_jB_{ji}\right) = \frac{1}{2}\sum_i\sum_j \left( B_{ij} + B_{ji} \right) ,
+\end{equation}
+\begin{equation}
+k_i^{(2)} = \sum_jB_{ij} + \sum_jB_{ji} = \sum_j{\left( B_{ij} + B_{ji} \right)},
+\end{equation}
+and similarly
+\begin{equation}
+ Q^{(2)}(c) = \frac{1}{2m}\sum_i\sum_j\left[ \left( B_{ij} + B_{ji} \right) - \gamma \frac{k_i^{(2)}k_j^{(2)}}{2m} \right] \delta_{ij}(c).
+\end{equation}
+
+Note how this is equivalent to the construction of an undirected graph as
+per Equation~(\ref{eqn:undirectedAB}). Similarly to above,
+there is no need to go into the remainder of the terms
+involved in the algorithm, as they all derive from $Q$.
+
+
+\subsection{Second generalisation of expressions approach}
+
+Another approach to generalising the expressions to the
+directed case, that still treats incoming and outgoing edges
+as equally important, is to propose an alternative modularity
+expression:
+\newcommand{\dkin}[1]{k_{#1}^{\text{in}}}
+\newcommand{\dkout}[1]{k_{#1}^{\text{out}}}
+\begin{equation}
+Q^{(3)}(c) = \frac{1}{2m}\sum_i\sum_j\left[ 2B_{ij} - 2\gamma \frac{\dkin{i}\dkout{j}}{2m} \right] \delta_{ij}(c), \\
+\end{equation}
+where
+\begin{equation}
+\dkout{i} = \sum_j{B_{ij}}
+\quad \quad \text{and} \quad \quad
+\dkin{i} = \sum_j{B_{ji}},
+\end{equation}
+so $k_i^{(2)} = \dkin{i} + \dkout{i}$.
+Note I leave the factor of two in the expression for $Q^{(3)}$ so that it
+remains as comparable to that for $Q^{(2)}$ as possible.
+There is no need for alternative $m$, as it will still be the same as above.
+$Q^{(3)}$ will differ from $Q^{(2)}$ in two ways.
+Firstly, as $k_i^{(2)} = \dkin{i} + \dkout{i}$,
+\begin{align*}
+\sum_i\sum_j k_i^{(2)} k_j^{(2)} \delta_{ij}(c) &= \sum_i\sum_j (\dkin{i} + \dkout{i}) (\dkin{j} + \dkout{j}) \delta_{ij}(c) \\
+ &= \sum_i\sum_j \left[ (\dkin{i}\dkin{j} + \dkout{i}\dkout{j}) + (\dkin{i}\dkout{j} + \dkin{j}\dkout{i}) \right] \delta_{ij}(c). \\
+ &= \sum_i\sum_j \left[ (\dkin{i}\dkin{j} + \dkout{i}\dkout{j}) + 2\dkin{i}\dkout{j} \right] \delta_{ij}(c), \\
+\end{align*}
+and similarly,
+\begin{equation}
+\sum_i\sum_j \left( B_{ij} + B_{ji} \right) \delta_{ij}(c) = 2\sum_i\sum_j B_{ij} \delta_{ij}(c).
+\end{equation}
+From these two expressions, we can see that
+\begin{equation}
+Q^{(3)} - Q^{(2)} = \frac{1}{2m}\sum_i\sum_j \gamma \frac{\dkin{i}\dkin{j} + \dkout{i}\dkout{j}}{2m} \delta_{ij}(c).
+\end{equation}
+
+
+\section{Directed Graphs in more detail}
+\label{sec:directedGraphsDetail}
+
+In Section \ref{sec:directedGraphs} we essentially showed three
+things:
+\begin{itemize}
+ \item How an undirected graph could be constructed from a directed
+ graph, thereby allowing the undirected algorithm to be used for
+ directed graphs.
+ \item How scaling all edge weights by a non-zero constant would not
+ affect the modularity function.
+ \item An alternative approach to extending the algorithm to
+ directed graphs that is not equivalent to first reducing it
+ to an undirected graph.
+\end{itemize}
+It is this third point that we will explore here.
+Analogously to Sections \ref{sec:initialPass} and \ref{sec:laterPasses} we will
+break this up into the initial pass and the later passes.
+
+\subsection{Initial pass}
+\label{sec:initialPassDirected}
+
+Continuing with the notation of Section \ref{sec:initialPass}, in which
+$c(i)$ denotes the community to which node $i$ belongs,
+and $\alpha = c(a)$, we define
+\newcommand{\dinStot}[1]{\Sigma_{\text{tot}}^{\text{in}(#1)}}
+\newcommand{\doutStot}[1]{\Sigma_{\text{tot}}^{\text{out}(#1)}}
+\begin{equation}
+ \doutStot{\alpha} = \sum_{i \in \alpha}\sum_{j}B_{ij} = \sum_{i \in \alpha}\dkout{i}
+ \quad \quad \text{and} \quad \quad
+ \dinStot{\alpha} = \sum_{i \in \alpha}\sum_{j}B_{ji} = \sum_{i \in \alpha}\dkin{i},
+\end{equation}
+\newcommand{\dinkin}[2]{k_{#1}^{\text{in}(#2)}}
+\newcommand{\doutkin}[2]{k_{#1}^{\text{out}(#2)}}
+\begin{equation}
+ \doutkin{i}{\alpha} = \sum_{j \in \alpha}B_{ij}
+ \quad \quad \text{and} \quad \quad
+ \dinkin{i}{\alpha} = \sum_{j \in \alpha}B_{ji},
+\end{equation}
+and we will entertain one more ambiguous notation choice:
+%\newcommand{\Sin}[1]{\Sigma_{\text{in}}^{#1}}
+\begin{equation}
+ \Sin{\alpha} = \sum_{i \in \alpha}\sum_{j \in \alpha}B_{ij} = \sum_{i \in \alpha}\doutkin{i}{\alpha} = \sum_{i \in \alpha}\dinkin{i}{\alpha}.
+\end{equation}
+
+Analogously to Section \ref{sec:initialPass}, we are interested in how
+$Q^{(3)}$ will change if we move a node $a$ from its
+current community $\alpha$, to a new community $\beta$,
+and analogously this will have two effects -- it will remove the terms
+from $Q^{(3)}$ related to $a$ in $\alpha$, which we will call $Q^{-(3)}$
+and it will add terms related to $a$ in $\beta$, which we will call $Q^{+(3)}$.
+The total change in $Q^{(3)}$ caused by the movement of $a$ from $\alpha$ to $\beta$ is
+\begin{equation}
+ \Delta Q^{(3)} = Q^{+(3)} - Q^{-(3)},
+\end{equation}
+where
+\begin{align*}
+Q^{-(3)} &= \frac{1}{2m}\left[ \left( 2B_{aa} - 2\gamma \frac{\dkin{a}\dkout{a}}{2m} \right)
++ \sum_{i \in \alpha, \, i \neq a} \left( 2B_{ia} + 2B_{ai} - 2\gamma \frac{\dkin{i}\dkout{a}}{2m} - 2\gamma \frac{\dkin{a}\dkout{i}}{2m} \right) \right] \\
+ &= \frac{1}{2m}\left[ \left( 2B_{aa} - 2\gamma \frac{\dkin{a}\dkout{a}}{2m} \right)
++ 2(\dinkin{a}{\alpha} - B_{aa}) + 2(\doutkin{a}{\alpha} - B_{aa}) \hdots \right . \\
+ & \quad \quad \quad \quad \quad \quad \left .
+- \frac{2\gamma\dkout{a}}{2m} (\dinStot{\alpha} - \dkin{a}) - \frac{2\gamma\dkin{a}}{2m} (\doutStot{\alpha} - \dkout{a}) \right] \\
+\end{align*}
+and
+\begin{align*}
+Q^{+(3)} &= \frac{1}{2m}\left[ \left( 2B_{aa} - 2\gamma \frac{\dkin{a}\dkout{a}}{2m} \right)
++ \sum_{i \in \beta} \left( 2B_{ia} + 2B_{ai} - 2\gamma \frac{\dkin{i}\dkout{a}}{2m} - 2\gamma \frac{\dkin{a}\dkout{i}}{2m} \right) \right] \\
+ &= \frac{1}{2m}\left[ \left( 2B_{aa} - 2\gamma \frac{\dkin{a}\dkout{a}}{2m} \right)
++ 2\dinkin{a}{\beta} + 2\doutkin{a}{\beta} - \frac{2\gamma\dkout{a}}{2m} \dinStot{\beta} - \frac{2\gamma\dkin{a}}{2m} \doutStot{\beta} \right] \\
+\end{align*}
+Similarly to Section \ref{sec:initialPass}, the first term in both these expressions is the same, and so cancels, leaving:
+\begin{align*}
+\Delta Q^{(3)} &= \frac{2}{2m}\left[
+\left( \dinkin{a}{\beta} + \doutkin{a}{\beta} - \frac{\gamma\dkout{a}}{2m} \dinStot{\beta} - \frac{\gamma\dkin{a}}{2m} \doutStot{\beta} \right) \right. \\
+& \hspace{-1cm}
+- \left. \left( (\dinkin{a}{\alpha} - B_{aa}) + (\doutkin{a}{\alpha} - B_{aa}) - \frac{\gamma\dkout{a}}{2m} (\dinStot{\alpha} - \dkin{a}) - \frac{\gamma\dkin{a}}{2m} (\doutStot{\alpha} - \dkout{a}) \right) \right] \\
+ &= \frac{2}{2m}\left[ (\dinkin{a}{\beta}-\dinkin{a}{\alpha}) + (\doutkin{a}{\beta}-\doutkin{a}{\alpha}) + 2B_{aa} \right. \\
+& \hspace{-1cm} \left.
+- \frac{\gamma\dkout{a}}{2m} (\dinStot{\beta}-\dinStot{\alpha}) - \frac{\gamma\dkin{a}}{2m} (\doutStot{\beta} - \doutStot{\alpha}) - \frac{2\gamma\dkin{a}\dkout{a}}{2m} \right]
+\end{align*}
+
+
+
+\subsection{Later passes}
+\label{sec:laterPassesDirected}
+
+In phase two a `meta-graph' is constructed where nodes correspond to
+the communities found in the preceding phase one step, and edge weight
+between two such communities (nodes, in the meta-graph)
+$\alpha$ and $\beta$ are defined to be
+\begin{equation}
+ B_{\alpha \beta}^* = \sum_{i \in \alpha}\sum_{j \in \beta}B_{ij}.
+ \label{eqn:Bij*}
+\end{equation}
+Note that $i$ and $j$ refer to nodes in the original graph, not nodes
+in the previous graph, and so this holds for any meta-graph, not just the first.
+Also note that this definition of $B^*_{\alpha \beta}$ allows for
+$B^*_{\alpha \alpha}$ to be non-zero, in fact
+\begin{equation}
+B_{\alpha \alpha}^* = \sum_{i \in \alpha}\sum_{j \in \alpha}B_{ij} = \Sin{\alpha}.
+\end{equation}
+
+In this newly constructed graph, $\alpha$ and $\beta$ are nodes, but
+also refer to communities (sets of nodes) in the original graph, and I
+use these two interpretations interchangeably, completely analogously to
+Section \ref{sec:laterPasses}.
+
+The results of Section~\ref{sec:initialPassDirected} generalise to these meta-graphs,
+and the generalised results mirror those of Section~\ref{sec:initialPassDirected} closely
+-- I distinguish the new results from those of Section~\ref{sec:initialPassDirected} by a
+superscript $*$.
+I use $i$ and $j$ to denote nodes of the original graph as in Sections~\ref{sec:initialPass}
+and \ref{sec:initialPassDirected},
+and use $z$ and $w$ to denote nodes of the meta-graph (communities of the original).
+I use analogous notation to Section~\ref{sec:initialPass}, $c^*(z)$, to
+denote the community to which node $z$ of the meta-graph belongs,
+and let $\mathfrak{a}$ be the community that the node $\alpha$ belongs to,
+i.e., $\mathfrak{a} = c^*(\alpha) $.
+
+Given this notation, we get all the same results as in \ref{sec:laterPasses}, but
+each split into two cases `out' and `in', separating by direction, essentially, so
+\newcommand{\dkinStar}[1]{k_{#1}^{\text{in} *}}
+\newcommand{\dkoutStar}[1]{k_{#1}^{\text{out} *}}
+\begin{equation}
+\dkoutStar{z} = \sum_w{B_{zw}^*} = \sum_w\sum_{i \in z}\sum_{j \in w}B_{ij} = \sum_{i \in z}\sum_jB_{ij} = \doutStot{z},
+\end{equation}
+\begin{equation}
+\dkinStar{z} = \sum_w{B_{wz}^*} = \sum_w\sum_{i \in z}\sum_{j \in w}B_{ji} = \sum_{i \in z}\sum_jB_{ji} = \dinStot{z},
+\end{equation}
+\newcommand{\dinStotStar}[1]{\Sigma_{\text{tot}}^{\text{in}(#1) *}}
+\newcommand{\doutStotStar}[1]{\Sigma_{\text{tot}}^{\text{out}(#1) *}}
+\begin{equation}
+ \doutStotStar{\mathfrak{a}} = \sum_{z \in \mathfrak{a}}\sum_{w}B_{zw}^* = \sum_{z \in \mathfrak{a}}\dkoutStar{z} = \sum_{z \in \mathfrak{a}}\doutStot{z},
+\end{equation}
+\begin{equation}
+ \dinStotStar{\mathfrak{a}} = \sum_{z \in \mathfrak{a}}\sum_{w}B_{wz}^* = \sum_{z \in \mathfrak{a}}\dkinStar{z} = \sum_{z \in \mathfrak{a}}\dinStot{z},
+\end{equation}
+\newcommand{\dinkinStar}[2]{k_{#1}^{\text{in}(#2) *}}
+\newcommand{\doutkinStar}[2]{k_{#1}^{\text{out}(#2) *}}
+\begin{equation}
+ \doutkinStar{z}{\mathfrak{a}} = \sum_{w \in \mathfrak{a}}{B_{zw}^*} = \sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}B_{ij}},
+\end{equation}
+\begin{equation}
+ \dinkinStar{z}{\mathfrak{a}} = \sum_{w \in \mathfrak{a}}{B_{wz}^*} = \sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}B_{ji}},
+\end{equation}
+and
+\begin{equation}
+\Sin{\mathfrak{a} *} = \sum_{z \in \mathfrak{a}}\sum_{w \in \mathfrak{a}}B_{zw}^* = \sum_{z \in \mathfrak{a}}\doutkinStar{z}{\mathfrak{a}} = \sum_{z \in \mathfrak{a}}\sum_{w \in \mathfrak{a}}{\sum_{i \in z}\sum_{j \in w}B_{ij}}.
+ %\label{eqn:Sin}
+\end{equation}
+
+If we let $\mathfrak{b}$ denote the community to which we are considering moving $\alpha$,
+then the expression for $\Delta Q$ from Section~\ref{sec:initialPassDirected} simply generalises as
+\begin{align*}
+\Delta Q^{(3)} &= \frac{2}{2m}\left[ (\dinkinStar{\alpha}{\mathfrak{b}}-\dinkinStar{\alpha}{\mathfrak{a}}) + (\doutkinStar{\alpha}{\mathfrak{b}}-\doutkinStar{\alpha}{\mathfrak{a}}) + 2B_{\alpha\alpha}^* \right. \\
+& \hspace{-1cm} \left.
+- \frac{\gamma\dkoutStar{\alpha}}{2m} (\dinStotStar{\mathfrak{b}}-\dinStotStar{\mathfrak{a}}) - \frac{\gamma\dkinStar{\alpha}}{2m} (\doutStotStar{\mathfrak{b}} - \doutStotStar{\mathfrak{a}}) - \frac{2\gamma\dkinStar{\alpha}\dkoutStar{\alpha}}{2m} \right]
+\end{align*}
+
+
+\end{document}
diff --git a/vendor/github.com/gonum/graph/community/louvain_common.go b/vendor/github.com/gonum/graph/community/louvain_common.go
new file mode 100644
index 000000000000..e806096a0ee1
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_common.go
@@ -0,0 +1,377 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package community provides graph community detection functions.
+package community
+
+import (
+ "fmt"
+ "math/rand"
+
+ "github.com/gonum/graph"
+)
+
+// Q returns the modularity Q score of the graph g subdivided into the
+// given communities at the given resolution. If communities is nil, the
+// unclustered modularity score is returned. The resolution parameter
+// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110.
+// Q will panic if g has any edge with negative edge weight.
+//
+// If g is undirected, Q is calculated according to
+// Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j),
+// If g is directed, it is calculated according to
+// Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j).
+//
+// graph.Undirect may be used as a shim to allow calculation of Q for
+// directed graphs with the undirected modularity function.
+func Q(g graph.Graph, communities [][]graph.Node, resolution float64) float64 {
+	// Dispatch on the graph's directedness; the undirected and directed
+	// modularity definitions differ (see the doc comment above).
+	switch g := g.(type) {
+	case graph.Undirected:
+		return qUndirected(g, communities, resolution)
+	case graph.Directed:
+		return qDirected(g, communities, resolution)
+	default:
+		// g implements neither graph.Undirected nor graph.Directed.
+		panic(fmt.Sprintf("community: invalid graph type: %T", g))
+	}
+}
+
+// ReducedGraph is a modularised graph. A ReducedGraph is
+// returned by Modularize.
+type ReducedGraph interface {
+	graph.Graph
+
+	// Communities returns the community memberships
+	// of the nodes in the graph used to generate
+	// the reduced graph.
+	Communities() [][]graph.Node
+
+	// Structure returns the community structure of
+	// the current level of the module clustering.
+	// Each slice in the returned value recursively
+	// describes the membership of a community at
+	// the current level by indexing via the node
+	// ID into the structure of the non-nil
+	// ReducedGraph returned by Expanded, or when the
+	// ReducedGraph is nil, by containing nodes
+	// from the original input graph.
+	//
+	// The returned value should not be mutated.
+	Structure() [][]graph.Node
+
+	// Expanded returns the next lower level of the
+	// module clustering or nil if at the lowest level.
+	//
+	// The returned ReducedGraph will be the same
+	// concrete type as the receiver.
+	Expanded() ReducedGraph
+}
+
+// Modularize returns the hierarchical modularization of g at the given resolution
+// using the Louvain algorithm. If src is nil, rand.Intn is used as the random
+// generator. Modularize will panic if g has any edge with negative edge weight.
+//
+// If g is undirected it is modularised to minimise
+// Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j),
+// If g is directed it is modularised to minimise
+// Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j).
+//
+// The concrete type of the ReducedGraph will be a pointer to either a
+// ReducedUndirected or a ReducedDirected depending on the type of g.
+//
+// graph.Undirect may be used as a shim to allow modularization of
+// directed graphs with the undirected modularity function.
+func Modularize(g graph.Graph, resolution float64, src *rand.Rand) ReducedGraph {
+	// Dispatch on the graph's directedness, mirroring Q above.
+	switch g := g.(type) {
+	case graph.Undirected:
+		return louvainUndirected(g, resolution, src)
+	case graph.Directed:
+		return louvainDirected(g, resolution, src)
+	default:
+		// g implements neither graph.Undirected nor graph.Directed.
+		panic(fmt.Sprintf("community: invalid graph type: %T", g))
+	}
+}
+
+// Multiplex is a multiplex graph: a stack of graph layers
+// sharing a common node set.
+type Multiplex interface {
+	// Nodes returns the slice of nodes
+	// for the multiplex graph.
+	// All layers must refer to the same
+	// set of nodes.
+	Nodes() []graph.Node
+
+	// Depth returns the number of layers
+	// in the multiplex graph.
+	Depth() int
+}
+
+// QMultiplex returns the modularity Q score of the multiplex graph layers
+// subdivided into the given communities at the given resolutions and weights. Q is
+// returned as the vector of weighted Q scores for each layer of the multiplex graph.
+// If communities is nil, the unclustered modularity score is returned.
+// If weights is nil layers are equally weighted, otherwise the length of
+// weights must equal the number of layers. If resolutions is nil, a resolution
+// of 1.0 is used for all layers, otherwise either a single element slice may be used
+// to specify a global resolution, or the length of resolutions must equal the number
+// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt
+// doi:10.1103/PhysRevE.74.016110.
+// QMultiplex will panic if the graph has any layer weight-scaled edge with
+// negative edge weight.
+//
+// If g is undirected, Q is calculated according to
+// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m_{layer} ] \delta(c_i,c_j),
+// If g is directed, it is calculated according to
+// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i^in k_j^out)/m_{layer} ] \delta(c_i,c_j).
+//
+// Note that Q values for multiplex graphs are not scaled by the total layer edge weight.
+//
+// graph.Undirect may be used as a shim to allow calculation of Q for
+// directed graphs.
+func QMultiplex(g Multiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 {
+	// Validate the per-layer parameter vectors before dispatching:
+	// weights must match the layer count exactly; resolutions may be
+	// a single global value or one value per layer.
+	if weights != nil && len(weights) != g.Depth() {
+		panic("community: weights vector length mismatch")
+	}
+	if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() {
+		panic("community: resolutions vector length mismatch")
+	}
+
+	switch g := g.(type) {
+	case UndirectedMultiplex:
+		return qUndirectedMultiplex(g, communities, weights, resolutions)
+	case DirectedMultiplex:
+		return qDirectedMultiplex(g, communities, weights, resolutions)
+	default:
+		// g implements neither UndirectedMultiplex nor DirectedMultiplex.
+		panic(fmt.Sprintf("community: invalid graph type: %T", g))
+	}
+}
+
+// ReducedMultiplex is a modularised multiplex graph.
+type ReducedMultiplex interface {
+	Multiplex
+
+	// Communities returns the community memberships
+	// of the nodes in the graph used to generate
+	// the reduced graph.
+	Communities() [][]graph.Node
+
+	// Structure returns the community structure of
+	// the current level of the module clustering.
+	// Each slice in the returned value recursively
+	// describes the membership of a community at
+	// the current level by indexing via the node
+	// ID into the structure of the non-nil
+	// ReducedMultiplex returned by Expanded, or when the
+	// ReducedMultiplex is nil, by containing nodes
+	// from the original input graph.
+	//
+	// The returned value should not be mutated.
+	Structure() [][]graph.Node
+
+	// Expanded returns the next lower level of the
+	// module clustering or nil if at the lowest level.
+	//
+	// The returned ReducedMultiplex will be the same
+	// concrete type as the receiver.
+	Expanded() ReducedMultiplex
+}
+
+// ModularizeMultiplex returns the hierarchical modularization of g at the given resolution
+// using the Louvain algorithm. If all is true and g has negatively weighted layers, all
+// communities will be searched during the modularization. If src is nil, rand.Intn is
+// used as the random generator. ModularizeMultiplex will panic if g has any edge with
+// edge weight that does not sign-match the layer weight.
+//
+// If g is undirected it is modularised to minimise
+// Q = \sum w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m ] \delta(c_i,c_j).
+// If g is directed it is modularised to minimise
+// Q = \sum w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i^in k_j^out)/m_{layer} ] \delta(c_i,c_j).
+//
+// The concrete type of the ReducedMultiplex will be a pointer to either a
+// ReducedUndirectedMultiplex or a ReducedDirectedMultiplex depending on
+// the type of g.
+//
+// graph.Undirect may be used as a shim to allow modularization of
+// directed graphs with the undirected modularity function.
+func ModularizeMultiplex(g Multiplex, weights, resolutions []float64, all bool, src *rand.Rand) ReducedMultiplex {
+	// Validate the per-layer parameter vectors, as in QMultiplex.
+	if weights != nil && len(weights) != g.Depth() {
+		panic("community: weights vector length mismatch")
+	}
+	if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() {
+		panic("community: resolutions vector length mismatch")
+	}
+
+	switch g := g.(type) {
+	case UndirectedMultiplex:
+		return louvainUndirectedMultiplex(g, weights, resolutions, all, src)
+	case DirectedMultiplex:
+		return louvainDirectedMultiplex(g, weights, resolutions, all, src)
+	default:
+		// g implements neither UndirectedMultiplex nor DirectedMultiplex.
+		panic(fmt.Sprintf("community: invalid graph type: %T", g))
+	}
+}
+
+// undirectedEdges is the edge structure of a reduced undirected graph.
+type undirectedEdges struct {
+	// edges and weights is the set
+	// of edges between nodes.
+	// weights is keyed such that
+	// the first element of the key
+	// is less than the second.
+	edges   [][]int
+	weights map[[2]int]float64
+}
+
+// directedEdges is the edge structure of a reduced directed graph.
+type directedEdges struct {
+	// edgesFrom, edgesTo and weights
+	// is the set of edges between nodes.
+	edgesFrom [][]int
+	edgesTo   [][]int
+	weights   map[[2]int]float64
+}
+
+// community is a reduced graph node describing its membership.
+type community struct {
+	// id is the node ID of the community
+	// in the reduced graph.
+	id int
+
+	// nodes holds the members of this community.
+	nodes []graph.Node
+
+	weight float64
+}
+
+// ID returns the reduced-graph node ID of the community,
+// satisfying the graph.Node interface.
+func (n community) ID() int { return n.id }
+
+// edge is a reduced graph edge.
+type edge struct {
+	from, to community
+	weight   float64
+}
+
+// From, To and Weight satisfy the weighted graph edge interface.
+func (e edge) From() graph.Node { return e.from }
+func (e edge) To() graph.Node   { return e.to }
+func (e edge) Weight() float64  { return e.weight }
+
+// multiplexCommunity is a reduced multiplex graph node describing its membership.
+type multiplexCommunity struct {
+	// id is the node ID of the community
+	// in the reduced graph.
+	id int
+
+	// nodes holds the members of this community.
+	nodes []graph.Node
+
+	// weights holds one weight per layer of the multiplex.
+	weights []float64
+}
+
+// ID returns the reduced-graph node ID of the community,
+// satisfying the graph.Node interface.
+func (n multiplexCommunity) ID() int { return n.id }
+
+// multiplexEdge is a reduced graph edge for a multiplex graph.
+type multiplexEdge struct {
+	from, to multiplexCommunity
+	weight   float64
+}
+
+// From, To and Weight satisfy the weighted graph edge interface.
+func (e multiplexEdge) From() graph.Node { return e.from }
+func (e multiplexEdge) To() graph.Node   { return e.to }
+func (e multiplexEdge) Weight() float64  { return e.weight }
+
+// commIdx is an index of a node in a community held by a localMover.
+type commIdx struct {
+	community int
+	node      int
+}
+
+// node is defined to avoid an import of .../graph/simple.
+type node int
+
+// ID satisfies the graph.Node interface.
+func (n node) ID() int { return int(n) }
+
+// minTaker is a set iterator.
+type minTaker interface {
+	TakeMin(p *int) bool
+}
+
+// dense is a dense integer set iterator over [0, n).
+type dense struct {
+	pos int
+	n   int
+}
+
+// TakeMin mimics intsets.Sparse TakeMin for dense sets. If the dense
+// iterator position is less than the iterator size, TakeMin sets *p
+// to the iterator position and increments the position and returns
+// true.
+// Otherwise, it returns false and *p is undefined.
+func (d *dense) TakeMin(p *int) bool {
+	if d.pos >= d.n {
+		return false
+	}
+	*p = d.pos
+	d.pos++
+	return true
+}
+
+// Panic messages used by the weight functions below.
+const (
+	negativeWeight = "community: unexpected negative edge weight"
+	positiveWeight = "community: unexpected positive edge weight"
+)
+
+// positiveWeightFuncFor returns a constructed weight function for the
+// positively weighted g. The returned function reports zero for absent
+// edges and panics if it encounters a negative edge weight.
+func positiveWeightFuncFor(g graph.Graph) func(x, y graph.Node) float64 {
+	// Prefer the graph's Weighter implementation when available.
+	if wg, ok := g.(graph.Weighter); ok {
+		return func(x, y graph.Node) float64 {
+			w, ok := wg.Weight(x, y)
+			if !ok {
+				return 0
+			}
+			if w < 0 {
+				panic(negativeWeight)
+			}
+			return w
+		}
+	}
+	// Fall back to per-edge weights.
+	return func(x, y graph.Node) float64 {
+		e := g.Edge(x, y)
+		if e == nil {
+			return 0
+		}
+		w := e.Weight()
+		if w < 0 {
+			panic(negativeWeight)
+		}
+		return w
+	}
+}
+
+// negativeWeightFuncFor returns a constructed weight function for the
+// negatively weighted g. The returned function reports zero for absent
+// edges, returns the negation of the stored (non-positive) weight, and
+// panics if it encounters a positive edge weight.
+func negativeWeightFuncFor(g graph.Graph) func(x, y graph.Node) float64 {
+	// Prefer the graph's Weighter implementation when available.
+	if wg, ok := g.(graph.Weighter); ok {
+		return func(x, y graph.Node) float64 {
+			w, ok := wg.Weight(x, y)
+			if !ok {
+				return 0
+			}
+			if w > 0 {
+				panic(positiveWeight)
+			}
+			return -w
+		}
+	}
+	// Fall back to per-edge weights.
+	return func(x, y graph.Node) float64 {
+		e := g.Edge(x, y)
+		if e == nil {
+			return 0
+		}
+		w := e.Weight()
+		if w > 0 {
+			panic(positiveWeight)
+		}
+		return -w
+	}
+}
+
+// depth returns max(1, len(weights)). It is used to ensure
+// that multiplex community weights are properly initialised.
+func depth(weights []float64) int {
+	if weights == nil {
+		return 1
+	}
+	return len(weights)
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_directed.go b/vendor/github.com/gonum/graph/community/louvain_directed.go
new file mode 100644
index 000000000000..a5a15f940238
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_directed.go
@@ -0,0 +1,633 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "math"
+ "math/rand"
+ "sort"
+
+ "golang.org/x/tools/container/intsets"
+
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+)
+
+// qDirected returns the modularity Q score of the graph g subdivided into the
+// given communities at the given resolution. If communities is nil, the
+// unclustered modularity score is returned. The resolution parameter
+// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110.
+// qDirected will panic if g has any edge with negative edge weight.
+//
+// Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j)
+//
+func qDirected(g graph.Directed, communities [][]graph.Node, resolution float64) float64 {
+ nodes := g.Nodes()
+ weight := positiveWeightFuncFor(g)
+
+ // Calculate the total edge weight of the graph
+ // and the table of penetrating edge weight sums.
+ var m float64
+ k := make(map[int]directedWeights, len(nodes))
+ for _, n := range nodes {
+ var wOut float64
+ u := n
+ for _, v := range g.From(u) {
+ wOut += weight(u, v)
+ }
+ var wIn float64
+ v := n
+ for _, u := range g.To(v) {
+ wIn += weight(u, v)
+ }
+ w := weight(n, n)
+ m += w + wOut // We only need to count edges once.
+ k[n.ID()] = directedWeights{out: w + wOut, in: w + wIn}
+ }
+
+ if communities == nil {
+ var q float64
+ for _, u := range nodes {
+ kU := k[u.ID()]
+ q += weight(u, u) - resolution*kU.out*kU.in/m
+ }
+ return q / m
+ }
+
+ var q float64
+ for _, c := range communities {
+ for _, u := range c {
+ kU := k[u.ID()]
+ for _, v := range c {
+ kV := k[v.ID()]
+ q += weight(u, v) - resolution*kU.out*kV.in/m
+ }
+ }
+ }
+ return q / m
+}
+
+// louvainDirected returns the hierarchical modularization of g at the given
+// resolution using the Louvain algorithm. If src is nil, rand.Intn is used
+// as the random generator. louvainDirected will panic if g has any edge with negative
+// edge weight.
+func louvainDirected(g graph.Directed, resolution float64, src *rand.Rand) ReducedGraph {
+ // See louvain.tex for a detailed description
+ // of the algorithm used here.
+
+ c := reduceDirected(g, nil)
+ rnd := rand.Intn
+ if src != nil {
+ rnd = src.Intn
+ }
+ for {
+ l := newDirectedLocalMover(c, c.communities, resolution)
+ if l == nil {
+ return c
+ }
+ if done := l.localMovingHeuristic(rnd); done {
+ return c
+ }
+ c = reduceDirected(c, l.communities)
+ }
+}
+
+// ReducedDirected is a directed graph of communities derived from a
+// parent graph by reduction.
+type ReducedDirected struct {
+ // nodes is the set of nodes held
+ // by the graph. In a ReducedDirected
+ // the node ID is the index into
+ // nodes.
+ nodes []community
+ directedEdges
+
+ // communities is the community
+ // structure of the graph.
+ communities [][]graph.Node
+
+ parent *ReducedDirected
+}
+
+var (
+ _ graph.Directed = (*ReducedDirected)(nil)
+ _ graph.Weighter = (*ReducedDirected)(nil)
+ _ ReducedGraph = (*ReducedUndirected)(nil)
+)
+
+// Communities returns the community memberships of the nodes in the
+// graph used to generate the reduced graph.
+func (g *ReducedDirected) Communities() [][]graph.Node {
+ communities := make([][]graph.Node, len(g.communities))
+ if g.parent == nil {
+ for i, members := range g.communities {
+ comm := make([]graph.Node, len(members))
+ for j, n := range members {
+ nodes := g.nodes[n.ID()].nodes
+ if len(nodes) != 1 {
+ panic("community: unexpected number of nodes in base graph community")
+ }
+ comm[j] = nodes[0]
+ }
+ communities[i] = comm
+ }
+ return communities
+ }
+ sub := g.parent.Communities()
+ for i, members := range g.communities {
+ var comm []graph.Node
+ for _, n := range members {
+ comm = append(comm, sub[n.ID()]...)
+ }
+ communities[i] = comm
+ }
+ return communities
+}
+
+// Structure returns the community structure of the current level of
+// the module clustering. The first index of the returned value
+// corresponds to the index of the nodes in the next higher level if
+// it exists. The returned value should not be mutated.
+func (g *ReducedDirected) Structure() [][]graph.Node {
+ return g.communities
+}
+
+// Expanded returns the next lower level of the module clustering or nil
+// if at the lowest level.
+func (g *ReducedDirected) Expanded() ReducedGraph {
+ return g.parent
+}
+
+// reduceDirected returns a reduced graph constructed from g divided
+// into the given communities. The communities value is mutated
+// by the call to reduceDirected. If communities is nil and g is a
+// ReducedDirected, it is returned unaltered.
+func reduceDirected(g graph.Directed, communities [][]graph.Node) *ReducedDirected {
+ if communities == nil {
+ if r, ok := g.(*ReducedDirected); ok {
+ return r
+ }
+
+ nodes := g.Nodes()
+ // TODO(kortschak) This sort is necessary really only
+ // for testing. In practice we would not be using the
+ // community provided by the user for a Q calculation.
+ // Probably we should use a function to map the
+ // communities in the test sets to the remapped order.
+ sort.Sort(ordered.ByID(nodes))
+ communities = make([][]graph.Node, len(nodes))
+ for i := range nodes {
+ communities[i] = []graph.Node{node(i)}
+ }
+
+ weight := positiveWeightFuncFor(g)
+ r := ReducedDirected{
+ nodes: make([]community, len(nodes)),
+ directedEdges: directedEdges{
+ edgesFrom: make([][]int, len(nodes)),
+ edgesTo: make([][]int, len(nodes)),
+ weights: make(map[[2]int]float64),
+ },
+ communities: communities,
+ }
+ communityOf := make(map[int]int, len(nodes))
+ for i, n := range nodes {
+ r.nodes[i] = community{id: i, nodes: []graph.Node{n}}
+ communityOf[n.ID()] = i
+ }
+ for _, n := range nodes {
+ id := communityOf[n.ID()]
+
+ var out []int
+ u := n
+ for _, v := range g.From(u) {
+ vid := communityOf[v.ID()]
+ if vid != id {
+ out = append(out, vid)
+ }
+ r.weights[[2]int{id, vid}] = weight(u, v)
+ }
+ r.edgesFrom[id] = out
+
+ var in []int
+ v := n
+ for _, u := range g.To(v) {
+ uid := communityOf[u.ID()]
+ if uid != id {
+ in = append(in, uid)
+ }
+ r.weights[[2]int{uid, id}] = weight(u, v)
+ }
+ r.edgesTo[id] = in
+ }
+ return &r
+ }
+
+ // Remove zero length communities destructively.
+ var commNodes int
+ for i := 0; i < len(communities); {
+ comm := communities[i]
+ if len(comm) == 0 {
+ communities[i] = communities[len(communities)-1]
+ communities[len(communities)-1] = nil
+ communities = communities[:len(communities)-1]
+ } else {
+ commNodes += len(comm)
+ i++
+ }
+ }
+
+ r := ReducedDirected{
+ nodes: make([]community, len(communities)),
+ directedEdges: directedEdges{
+ edgesFrom: make([][]int, len(communities)),
+ edgesTo: make([][]int, len(communities)),
+ weights: make(map[[2]int]float64),
+ },
+ }
+ r.communities = make([][]graph.Node, len(communities))
+ for i := range r.communities {
+ r.communities[i] = []graph.Node{node(i)}
+ }
+ if g, ok := g.(*ReducedDirected); ok {
+ // Make sure we retain the truncated
+ // community structure.
+ g.communities = communities
+ r.parent = g
+ }
+ weight := positiveWeightFuncFor(g)
+ communityOf := make(map[int]int, commNodes)
+ for i, comm := range communities {
+ r.nodes[i] = community{id: i, nodes: comm}
+ for _, n := range comm {
+ communityOf[n.ID()] = i
+ }
+ }
+ for id, comm := range communities {
+ var out, in []int
+ for _, n := range comm {
+ u := n
+ for _, v := range comm {
+ r.nodes[id].weight += weight(u, v)
+ }
+
+ for _, v := range g.From(u) {
+ vid := communityOf[v.ID()]
+ found := false
+ for _, e := range out {
+ if e == vid {
+ found = true
+ break
+ }
+ }
+ if !found && vid != id {
+ out = append(out, vid)
+ }
+ // Add half weights because the other
+ // ends of edges are also counted.
+ r.weights[[2]int{id, vid}] += weight(u, v) / 2
+ }
+
+ v := n
+ for _, u := range g.To(v) {
+ uid := communityOf[u.ID()]
+ found := false
+ for _, e := range in {
+ if e == uid {
+ found = true
+ break
+ }
+ }
+ if !found && uid != id {
+ in = append(in, uid)
+ }
+ // Add half weights because the other
+ // ends of edges are also counted.
+ r.weights[[2]int{uid, id}] += weight(u, v) / 2
+ }
+ }
+ r.edgesFrom[id] = out
+ r.edgesTo[id] = in
+ }
+ return &r
+}
+
+// Has returns whether the node exists within the graph.
+func (g *ReducedDirected) Has(n graph.Node) bool {
+ id := n.ID()
+ return id >= 0 || id < len(g.nodes)
+}
+
+// Nodes returns all the nodes in the graph.
+func (g *ReducedDirected) Nodes() []graph.Node {
+ nodes := make([]graph.Node, len(g.nodes))
+ for i := range g.nodes {
+ nodes[i] = node(i)
+ }
+ return nodes
+}
+
+// From returns all nodes in g that can be reached directly from u.
+func (g *ReducedDirected) From(u graph.Node) []graph.Node {
+ out := g.edgesFrom[u.ID()]
+ nodes := make([]graph.Node, len(out))
+ for i, vid := range out {
+ nodes[i] = g.nodes[vid]
+ }
+ return nodes
+}
+
+// To returns all nodes in g that can reach directly to v.
+func (g *ReducedDirected) To(v graph.Node) []graph.Node {
+ in := g.edgesTo[v.ID()]
+ nodes := make([]graph.Node, len(in))
+ for i, uid := range in {
+ nodes[i] = g.nodes[uid]
+ }
+ return nodes
+}
+
+// HasEdgeBetween returns whether an edge exists between nodes x and y.
+func (g *ReducedDirected) HasEdgeBetween(x, y graph.Node) bool {
+ xid := x.ID()
+ yid := y.ID()
+ if xid == yid {
+ return false
+ }
+ _, ok := g.weights[[2]int{xid, yid}]
+ if ok {
+ return true
+ }
+ _, ok = g.weights[[2]int{yid, xid}]
+ return ok
+}
+
+// HasEdgeFromTo returns whether an edge exists from node u to v.
+func (g *ReducedDirected) HasEdgeFromTo(u, v graph.Node) bool {
+ uid := u.ID()
+ vid := v.ID()
+ if uid == vid {
+ return false
+ }
+ _, ok := g.weights[[2]int{uid, vid}]
+ return ok
+}
+
+// Edge returns the edge from u to v if such an edge exists and nil otherwise.
+// The node v must be directly reachable from u as defined by the From method.
+func (g *ReducedDirected) Edge(u, v graph.Node) graph.Edge {
+ uid := u.ID()
+ vid := v.ID()
+ w, ok := g.weights[[2]int{uid, vid}]
+ if !ok {
+ return nil
+ }
+ return edge{from: g.nodes[uid], to: g.nodes[vid], weight: w}
+}
+
+// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge.
+// If x and y are the same node the internal node weight is returned. If there is no joining
+// edge between the two nodes the weight value returned is zero. Weight returns true if an edge
+// exists between x and y or if x and y have the same ID, false otherwise.
+func (g *ReducedDirected) Weight(x, y graph.Node) (w float64, ok bool) {
+ xid := x.ID()
+ yid := y.ID()
+ if xid == yid {
+ return g.nodes[xid].weight, true
+ }
+ w, ok = g.weights[[2]int{xid, yid}]
+ return w, ok
+}
+
+// directedLocalMover is a step in graph modularity optimization.
+type directedLocalMover struct {
+ g *ReducedDirected
+
+ // nodes is the set of working nodes.
+ nodes []graph.Node
+ // edgeWeightsOf is the weighted degree
+ // of each node indexed by ID.
+ edgeWeightsOf []directedWeights
+
+ // m is the total sum of edge
+ // weights in g.
+ m float64
+
+ // weight is the weight function
+ // provided by g or a function
+ // that returns the Weight value
+ // of the non-nil edge between x
+ // and y.
+ weight func(x, y graph.Node) float64
+
+ // communities is the current
+ // division of g.
+ communities [][]graph.Node
+ // memberships is a mapping between
+ // node ID and community membership.
+ memberships []int
+
+ // resolution is the Reichardt and
+ // Bornholdt γ parameter as defined
+ // in doi:10.1103/PhysRevE.74.016110.
+ resolution float64
+
+ // moved indicates that a call to
+ // move has been made since the last
+ // call to shuffle.
+ moved bool
+
+ // changed indicates that a move
+ // has been made since the creation
+ // of the local mover.
+ changed bool
+}
+
+type directedWeights struct {
+ out, in float64
+}
+
+// newDirectedLocalMover returns a new directedLocalMover initialized with
+// the graph g, a set of communities and a modularity resolution parameter.
+// The node IDs of g must be contiguous in [0,n) where n is the number of
+// nodes.
+// If g has a zero edge weight sum, nil is returned.
+func newDirectedLocalMover(g *ReducedDirected, communities [][]graph.Node, resolution float64) *directedLocalMover {
+ nodes := g.Nodes()
+ l := directedLocalMover{
+ g: g,
+ nodes: nodes,
+ edgeWeightsOf: make([]directedWeights, len(nodes)),
+ communities: communities,
+ memberships: make([]int, len(nodes)),
+ resolution: resolution,
+ weight: positiveWeightFuncFor(g),
+ }
+
+ // Calculate the total edge weight of the graph
+ // and degree weights for each node.
+ for _, n := range l.nodes {
+ u := n
+ var wOut float64
+ for _, v := range g.From(u) {
+ wOut += l.weight(u, v)
+ }
+
+ v := n
+ var wIn float64
+ for _, u := range g.To(v) {
+ wIn += l.weight(u, v)
+ }
+
+ w := l.weight(n, n)
+ l.edgeWeightsOf[n.ID()] = directedWeights{out: w + wOut, in: w + wIn}
+ l.m += w + wOut
+ }
+
+ // Assign membership mappings.
+ for i, c := range communities {
+ for _, n := range c {
+ l.memberships[n.ID()] = i
+ }
+ }
+
+ return &l
+}
+
+// localMovingHeuristic performs the Louvain local moving heuristic until
+// no further moves can be made. It returns a boolean indicating that the
+// directedLocalMover has not made any improvement to the community structure and
+// so the Louvain algorithm is done.
+func (l *directedLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) {
+ for {
+ l.shuffle(rnd)
+ for _, n := range l.nodes {
+ dQ, dst, src := l.deltaQ(n)
+ if dQ <= 0 {
+ continue
+ }
+ l.move(dst, src)
+ }
+ if !l.moved {
+ return !l.changed
+ }
+ }
+}
+
+// shuffle performs a Fisher-Yates shuffle on the nodes held by the
+// directedLocalMover using the random source rnd which should return an
+// integer in the range [0,n).
+func (l *directedLocalMover) shuffle(rnd func(n int) int) {
+ l.moved = false
+ for i := range l.nodes[:len(l.nodes)-1] {
+ j := i + rnd(len(l.nodes)-i)
+ l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i]
+ }
+}
+
+// move moves the node at src to the community at dst.
+func (l *directedLocalMover) move(dst int, src commIdx) {
+ l.moved = true
+ l.changed = true
+
+ srcComm := l.communities[src.community]
+ n := srcComm[src.node]
+
+ l.memberships[n.ID()] = dst
+
+ l.communities[dst] = append(l.communities[dst], n)
+ srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil
+ l.communities[src.community] = srcComm[:len(srcComm)-1]
+}
+
+// deltaQ returns the highest gain in modularity attainable by moving
+// n from its current community to another connected community and
+// the index of the chosen destination. The index into the directedLocalMover's
+// communities field is returned in src if n is in communities.
+func (l *directedLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) {
+ id := n.ID()
+
+ a_aa := l.weight(n, n)
+ k_a := l.edgeWeightsOf[id]
+ m := l.m
+ gamma := l.resolution
+
+ // Find communites connected to n.
+ var connected intsets.Sparse
+ // The following for loop is equivalent to:
+ //
+ // for _, v := range l.g.From(n) {
+ // connected.Insert(l.memberships[v.ID()])
+ // }
+ // for _, v := range l.g.To(n) {
+ // connected.Insert(l.memberships[v.ID()])
+ // }
+ //
+ // This is done to avoid two allocations.
+ for _, vid := range l.g.edgesFrom[id] {
+ connected.Insert(l.memberships[vid])
+ }
+ for _, vid := range l.g.edgesTo[id] {
+ connected.Insert(l.memberships[vid])
+ }
+ // Insert the node's own community.
+ connected.Insert(l.memberships[id])
+
+ // Calculate the highest modularity gain
+ // from moving into another community and
+ // keep the index of that community.
+ var dQremove float64
+ dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1}
+ var i int
+ for connected.TakeMin(&i) {
+ c := l.communities[i]
+ var k_aC, sigma_totC directedWeights // C is a substitution for ^𝛼 or ^𝛽.
+ var removal bool
+ for j, u := range c {
+ uid := u.ID()
+ if uid == id {
+ if src.community != -1 {
+ panic("community: multiple sources")
+ }
+ src = commIdx{i, j}
+ removal = true
+ }
+
+ k_aC.in += l.weight(u, n)
+ k_aC.out += l.weight(n, u)
+ // sigma_totC could be kept for each community
+ // and updated for moves, changing the calculation
+ // of sigma_totC here from O(n_c) to O(1), but
+ // in practice the time savings do not appear
+ // to be compelling and do not make up for the
+ // increase in code complexity and space required.
+ w := l.edgeWeightsOf[uid]
+ sigma_totC.in += w.in
+ sigma_totC.out += w.out
+ }
+
+ // See louvain.tex for a derivation of these equations.
+ switch {
+ case removal:
+ // The community c was the current community,
+ // so calculate the change due to removal.
+ dQremove = (k_aC.in /*^𝛼*/ - a_aa) + (k_aC.out /*^𝛼*/ - a_aa) -
+ gamma*(k_a.in*(sigma_totC.out /*^𝛼*/ -k_a.out)+k_a.out*(sigma_totC.in /*^𝛼*/ -k_a.in))/m
+
+ default:
+ // Otherwise calculate the change due to an addition
+ // to c and retain if it is the current best.
+ dQ := k_aC.in /*^𝛽*/ + k_aC.out /*^𝛽*/ -
+ gamma*(k_a.in*sigma_totC.out /*^𝛽*/ +k_a.out*sigma_totC.in /*^𝛽*/)/m
+
+ if dQ > dQadd {
+ dQadd = dQ
+ dst = i
+ }
+ }
+ }
+
+ return (dQadd - dQremove) / m, dst, src
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_directed_multiplex.go b/vendor/github.com/gonum/graph/community/louvain_directed_multiplex.go
new file mode 100644
index 000000000000..6210bd3eacbc
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_directed_multiplex.go
@@ -0,0 +1,880 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "fmt"
+ "math"
+ "math/rand"
+ "sort"
+
+ "golang.org/x/tools/container/intsets"
+
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+)
+
+// DirectedMultiplex is a directed multiplex graph.
+type DirectedMultiplex interface {
+ Multiplex
+
+ // Layer returns the lth layer of the
+ // multiplex graph.
+ Layer(l int) graph.Directed
+}
+
+// qDirectedMultiplex returns the modularity Q score of the multiplex graph layers
+// subdivided into the given communities at the given resolutions and weights. Q is
+// returned as the vector of weighted Q scores for each layer of the multiplex graph.
+// If communities is nil, the unclustered modularity score is returned.
+// If weights is nil layers are equally weighted, otherwise the length of
+// weights must equal the number of layers. If resolutions is nil, a resolution
+// of 1.0 is used for all layers, otherwise either a single element slice may be used
+// to specify a global resolution, or the length of resolutions must equal the number
+// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt
+// doi:10.1103/PhysRevE.74.016110.
+// qUndirectedMultiplex will panic if the graph has any layer weight-scaled edge with
+// negative edge weight.
+//
+// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m ] \delta(c_i,c_j)
+//
+// Note that Q values for multiplex graphs are not scaled by the total layer edge weight.
+func qDirectedMultiplex(g DirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 {
+ q := make([]float64, g.Depth())
+ nodes := g.Nodes()
+ layerWeight := 1.0
+ layerResolution := 1.0
+ if len(resolutions) == 1 {
+ layerResolution = resolutions[0]
+ }
+ for l := 0; l < g.Depth(); l++ {
+ layer := g.Layer(l)
+
+ if weights != nil {
+ layerWeight = weights[l]
+ }
+ if layerWeight == 0 {
+ continue
+ }
+
+ if len(resolutions) > 1 {
+ layerResolution = resolutions[l]
+ }
+
+ var weight func(x, y graph.Node) float64
+ if layerWeight < 0 {
+ weight = negativeWeightFuncFor(layer)
+ } else {
+ weight = positiveWeightFuncFor(layer)
+ }
+
+ // Calculate the total edge weight of the layer
+ // and the table of penetrating edge weight sums.
+ var m float64
+ k := make(map[int]directedWeights, len(nodes))
+ for _, n := range nodes {
+ var wOut float64
+ u := n
+ for _, v := range layer.From(u) {
+ wOut += weight(u, v)
+ }
+ var wIn float64
+ v := n
+ for _, u := range layer.To(v) {
+ wIn += weight(u, v)
+ }
+ w := weight(n, n)
+ m += w + wOut // We only need to count edges once.
+ k[n.ID()] = directedWeights{out: w + wOut, in: w + wIn}
+ }
+
+ if communities == nil {
+ var qLayer float64
+ for _, u := range nodes {
+ kU := k[u.ID()]
+ qLayer += weight(u, u) - layerResolution*kU.out*kU.in/m
+ }
+ q[l] = layerWeight * qLayer
+ continue
+ }
+
+ var qLayer float64
+ for _, c := range communities {
+ for _, u := range c {
+ kU := k[u.ID()]
+ for _, v := range c {
+ kV := k[v.ID()]
+ qLayer += weight(u, v) - layerResolution*kU.out*kV.in/m
+ }
+ }
+ }
+ q[l] = layerWeight * qLayer
+ }
+
+ return q
+}
+
+// DirectedLayers implements DirectedMultiplex.
+type DirectedLayers []graph.Directed
+
+// NewDirectedLayers returns a DirectedLayers using the provided layers
+// ensuring there is a match between IDs for each layer.
+func NewDirectedLayers(layers ...graph.Directed) (DirectedLayers, error) {
+ if len(layers) == 0 {
+ return nil, nil
+ }
+ var base, next intsets.Sparse
+ for _, n := range layers[0].Nodes() {
+ base.Insert(n.ID())
+ }
+ for i, l := range layers[1:] {
+ next.Clear()
+ for _, n := range l.Nodes() {
+ next.Insert(n.ID())
+ }
+ if !next.Equals(&base) {
+ return nil, fmt.Errorf("community: layer ID mismatch between layers: %d", i+1)
+ }
+ }
+ return layers, nil
+}
+
+// Nodes returns the nodes of the receiver.
+func (g DirectedLayers) Nodes() []graph.Node {
+ if len(g) == 0 {
+ return nil
+ }
+ return g[0].Nodes()
+}
+
+// Depth returns the depth of the multiplex graph.
+func (g DirectedLayers) Depth() int { return len(g) }
+
+// Layer returns the lth layer of the multiplex graph.
+func (g DirectedLayers) Layer(l int) graph.Directed { return g[l] }
+
+// louvainDirectedMultiplex returns the hierarchical modularization of g at the given resolution
+// using the Louvain algorithm. If all is true and g has negatively weighted layers, all
+// communities will be searched during the modularization. If src is nil, rand.Intn is
+// used as the random generator. louvainDirectedMultiplex will panic if g has any edge with
+// edge weight that does not sign-match the layer weight.
+//
+// graph.Undirect may be used as a shim to allow modularization of directed graphs.
+func louvainDirectedMultiplex(g DirectedMultiplex, weights, resolutions []float64, all bool, src *rand.Rand) *ReducedDirectedMultiplex {
+ if weights != nil && len(weights) != g.Depth() {
+ panic("community: weights vector length mismatch")
+ }
+ if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() {
+ panic("community: resolutions vector length mismatch")
+ }
+
+ // See louvain.tex for a detailed description
+ // of the algorithm used here.
+
+ c := reduceDirectedMultiplex(g, nil, weights)
+ rnd := rand.Intn
+ if src != nil {
+ rnd = src.Intn
+ }
+ for {
+ l := newDirectedMultiplexLocalMover(c, c.communities, weights, resolutions, all)
+ if l == nil {
+ return c
+ }
+ if done := l.localMovingHeuristic(rnd); done {
+ return c
+ }
+ c = reduceDirectedMultiplex(c, l.communities, weights)
+ }
+}
+
+// ReducedDirectedMultiplex is a directed graph of communities derived from a
+// parent graph by reduction.
+type ReducedDirectedMultiplex struct {
+ // nodes is the set of nodes held
+ // by the graph. In a ReducedDirectedMultiplex
+ // the node ID is the index into
+ // nodes.
+ nodes []multiplexCommunity
+ layers []directedEdges
+
+ // communities is the community
+ // structure of the graph.
+ communities [][]graph.Node
+
+ parent *ReducedDirectedMultiplex
+}
+
+var (
+ _ DirectedMultiplex = (*ReducedDirectedMultiplex)(nil)
+ _ graph.Directed = (*directedLayerHandle)(nil)
+ _ graph.Weighter = (*directedLayerHandle)(nil)
+)
+
+// Nodes returns all the nodes in the graph.
+func (g *ReducedDirectedMultiplex) Nodes() []graph.Node {
+ nodes := make([]graph.Node, len(g.nodes))
+ for i := range g.nodes {
+ nodes[i] = node(i)
+ }
+ return nodes
+}
+
+// Depth returns the number of layers in the multiplex graph.
+func (g *ReducedDirectedMultiplex) Depth() int { return len(g.layers) }
+
+// Layer returns the lth layer of the multiplex graph.
+func (g *ReducedDirectedMultiplex) Layer(l int) graph.Directed {
+ return directedLayerHandle{multiplex: g, layer: l}
+}
+
+// Communities returns the community memberships of the nodes in the
+// graph used to generate the reduced graph.
+func (g *ReducedDirectedMultiplex) Communities() [][]graph.Node {
+ communities := make([][]graph.Node, len(g.communities))
+ if g.parent == nil {
+ for i, members := range g.communities {
+ comm := make([]graph.Node, len(members))
+ for j, n := range members {
+ nodes := g.nodes[n.ID()].nodes
+ if len(nodes) != 1 {
+ panic("community: unexpected number of nodes in base graph community")
+ }
+ comm[j] = nodes[0]
+ }
+ communities[i] = comm
+ }
+ return communities
+ }
+ sub := g.parent.Communities()
+ for i, members := range g.communities {
+ var comm []graph.Node
+ for _, n := range members {
+ comm = append(comm, sub[n.ID()]...)
+ }
+ communities[i] = comm
+ }
+ return communities
+}
+
+// Structure returns the community structure of the current level of
+// the module clustering. The first index of the returned value
+// corresponds to the index of the nodes in the next higher level if
+// it exists. The returned value should not be mutated.
+func (g *ReducedDirectedMultiplex) Structure() [][]graph.Node {
+ return g.communities
+}
+
+// Expanded returns the next lower level of the module clustering or nil
+// if at the lowest level.
+func (g *ReducedDirectedMultiplex) Expanded() ReducedMultiplex {
+ return g.parent
+}
+
+// reduceDirectedMultiplex returns a reduced graph constructed from g divided
+// into the given communities. The communities value is mutated
+// by the call to reduceDirectedMultiplex. If communities is nil and g is a
+// ReducedDirectedMultiplex, it is returned unaltered.
+func reduceDirectedMultiplex(g DirectedMultiplex, communities [][]graph.Node, weights []float64) *ReducedDirectedMultiplex {
+ if communities == nil {
+ if r, ok := g.(*ReducedDirectedMultiplex); ok {
+ return r
+ }
+
+ nodes := g.Nodes()
+ // TODO(kortschak) This sort is necessary really only
+ // for testing. In practice we would not be using the
+ // community provided by the user for a Q calculation.
+ // Probably we should use a function to map the
+ // communities in the test sets to the remapped order.
+ sort.Sort(ordered.ByID(nodes))
+ communities = make([][]graph.Node, len(nodes))
+ for i := range nodes {
+ communities[i] = []graph.Node{node(i)}
+ }
+
+ r := ReducedDirectedMultiplex{
+ nodes: make([]multiplexCommunity, len(nodes)),
+ layers: make([]directedEdges, g.Depth()),
+ communities: communities,
+ }
+ communityOf := make(map[int]int, len(nodes))
+ for i, n := range nodes {
+ r.nodes[i] = multiplexCommunity{id: i, nodes: []graph.Node{n}, weights: make([]float64, depth(weights))}
+ communityOf[n.ID()] = i
+ }
+ for i := range r.layers {
+ r.layers[i] = directedEdges{
+ edgesFrom: make([][]int, len(nodes)),
+ edgesTo: make([][]int, len(nodes)),
+ weights: make(map[[2]int]float64),
+ }
+ }
+ w := 1.0
+ for l := 0; l < g.Depth(); l++ {
+ layer := g.Layer(l)
+ if weights != nil {
+ w = weights[l]
+ }
+ if w == 0 {
+ continue
+ }
+ var sign float64
+ var weight func(x, y graph.Node) float64
+ if w < 0 {
+ sign, weight = -1, negativeWeightFuncFor(layer)
+ } else {
+ sign, weight = 1, positiveWeightFuncFor(layer)
+ }
+ for _, n := range nodes {
+ id := communityOf[n.ID()]
+
+ var out []int
+ u := n
+ for _, v := range layer.From(u) {
+ vid := communityOf[v.ID()]
+ if vid != id {
+ out = append(out, vid)
+ }
+ r.layers[l].weights[[2]int{id, vid}] = sign * weight(u, v)
+ }
+ r.layers[l].edgesFrom[id] = out
+
+ var in []int
+ v := n
+ for _, u := range layer.To(v) {
+ uid := communityOf[u.ID()]
+ if uid != id {
+ in = append(in, uid)
+ }
+ r.layers[l].weights[[2]int{uid, id}] = sign * weight(u, v)
+ }
+ r.layers[l].edgesTo[id] = in
+ }
+ }
+ return &r
+ }
+
+ // Remove zero length communities destructively.
+ var commNodes int
+ for i := 0; i < len(communities); {
+ comm := communities[i]
+ if len(comm) == 0 {
+ communities[i] = communities[len(communities)-1]
+ communities[len(communities)-1] = nil
+ communities = communities[:len(communities)-1]
+ } else {
+ commNodes += len(comm)
+ i++
+ }
+ }
+
+ r := ReducedDirectedMultiplex{
+ nodes: make([]multiplexCommunity, len(communities)),
+ layers: make([]directedEdges, g.Depth()),
+ }
+ communityOf := make(map[int]int, commNodes)
+ for i, comm := range communities {
+ r.nodes[i] = multiplexCommunity{id: i, nodes: comm, weights: make([]float64, depth(weights))}
+ for _, n := range comm {
+ communityOf[n.ID()] = i
+ }
+ }
+ for i := range r.layers {
+ r.layers[i] = directedEdges{
+ edgesFrom: make([][]int, len(communities)),
+ edgesTo: make([][]int, len(communities)),
+ weights: make(map[[2]int]float64),
+ }
+ }
+ r.communities = make([][]graph.Node, len(communities))
+ for i := range r.communities {
+ r.communities[i] = []graph.Node{node(i)}
+ }
+ if g, ok := g.(*ReducedDirectedMultiplex); ok {
+ // Make sure we retain the truncated
+ // community structure.
+ g.communities = communities
+ r.parent = g
+ }
+ w := 1.0
+ for l := 0; l < g.Depth(); l++ {
+ layer := g.Layer(l)
+ if weights != nil {
+ w = weights[l]
+ }
+ if w == 0 {
+ continue
+ }
+ var sign float64
+ var weight func(x, y graph.Node) float64
+ if w < 0 {
+ sign, weight = -1, negativeWeightFuncFor(layer)
+ } else {
+ sign, weight = 1, positiveWeightFuncFor(layer)
+ }
+ for id, comm := range communities {
+ var out, in []int
+ for _, n := range comm {
+ u := n
+ for _, v := range comm {
+ r.nodes[id].weights[l] += sign * weight(u, v)
+ }
+
+ for _, v := range layer.From(u) {
+ vid := communityOf[v.ID()]
+ found := false
+ for _, e := range out {
+ if e == vid {
+ found = true
+ break
+ }
+ }
+ if !found && vid != id {
+ out = append(out, vid)
+ }
+ // Add half weights because the other
+ // ends of edges are also counted.
+ r.layers[l].weights[[2]int{id, vid}] += sign * weight(u, v) / 2
+ }
+
+ v := n
+ for _, u := range layer.To(v) {
+ uid := communityOf[u.ID()]
+ found := false
+ for _, e := range in {
+ if e == uid {
+ found = true
+ break
+ }
+ }
+ if !found && uid != id {
+ in = append(in, uid)
+ }
+ // Add half weights because the other
+ // ends of edges are also counted.
+ r.layers[l].weights[[2]int{uid, id}] += sign * weight(u, v) / 2
+ }
+
+ }
+ r.layers[l].edgesFrom[id] = out
+ r.layers[l].edgesTo[id] = in
+ }
+ }
+ return &r
+}
+
+// directedLayerHandle is a handle to a multiplex graph layer.
+type directedLayerHandle struct {
+ // multiplex is the complete
+ // multiplex graph.
+ multiplex *ReducedDirectedMultiplex
+
+ // layer is an index into the
+ // multiplex for the current
+ // layer.
+ layer int
+}
+
+// Has returns whether the node exists within the graph.
+func (g directedLayerHandle) Has(n graph.Node) bool {
+ id := n.ID()
+ return id >= 0 || id < len(g.multiplex.nodes)
+}
+
+// Nodes returns all the nodes in the graph.
+func (g directedLayerHandle) Nodes() []graph.Node {
+ nodes := make([]graph.Node, len(g.multiplex.nodes))
+ for i := range g.multiplex.nodes {
+ nodes[i] = node(i)
+ }
+ return nodes
+}
+
+// From returns all nodes in g that can be reached directly from u.
+func (g directedLayerHandle) From(u graph.Node) []graph.Node {
+ out := g.multiplex.layers[g.layer].edgesFrom[u.ID()]
+ nodes := make([]graph.Node, len(out))
+ for i, vid := range out {
+ nodes[i] = g.multiplex.nodes[vid]
+ }
+ return nodes
+}
+
+// To returns all nodes in g that can reach directly to v.
+func (g directedLayerHandle) To(v graph.Node) []graph.Node {
+ in := g.multiplex.layers[g.layer].edgesTo[v.ID()]
+ nodes := make([]graph.Node, len(in))
+ for i, uid := range in {
+ nodes[i] = g.multiplex.nodes[uid]
+ }
+ return nodes
+}
+
+// HasEdgeBetween returns whether an edge exists between nodes x and y.
+func (g directedLayerHandle) HasEdgeBetween(x, y graph.Node) bool {
+ xid := x.ID()
+ yid := y.ID()
+ if xid == yid {
+ return false
+ }
+ _, ok := g.multiplex.layers[g.layer].weights[[2]int{xid, yid}]
+ if ok {
+ return true
+ }
+ _, ok = g.multiplex.layers[g.layer].weights[[2]int{yid, xid}]
+ return ok
+}
+
+// HasEdgeFromTo returns whether an edge exists from node u to v.
+func (g directedLayerHandle) HasEdgeFromTo(u, v graph.Node) bool {
+ uid := u.ID()
+ vid := v.ID()
+ if uid == vid {
+ return false
+ }
+ _, ok := g.multiplex.layers[g.layer].weights[[2]int{uid, vid}]
+ return ok
+}
+
+// Edge returns the edge from u to v if such an edge exists and nil otherwise.
+// The node v must be directly reachable from u as defined by the From method.
+func (g directedLayerHandle) Edge(u, v graph.Node) graph.Edge {
+	uid := u.ID()
+	vid := v.ID()
+	w, ok := g.multiplex.layers[g.layer].weights[[2]int{uid, vid}]
+	if !ok {
+		return nil
+	}
+	return multiplexEdge{from: g.multiplex.nodes[uid], to: g.multiplex.nodes[vid], weight: w}
+}
+
+// EdgeBetween returns the edge between nodes x and y.
+func (g directedLayerHandle) EdgeBetween(x, y graph.Node) graph.Edge {
+ return g.Edge(x, y)
+}
+
+// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge.
+// If x and y are the same node the internal node weight is returned. If there is no joining
+// edge between the two nodes the weight value returned is zero. Weight returns true if an edge
+// exists between x and y or if x and y have the same ID, false otherwise.
+func (g directedLayerHandle) Weight(x, y graph.Node) (w float64, ok bool) {
+ xid := x.ID()
+ yid := y.ID()
+ if xid == yid {
+ return g.multiplex.nodes[xid].weights[g.layer], true
+ }
+ w, ok = g.multiplex.layers[g.layer].weights[[2]int{xid, yid}]
+ return w, ok
+}
+
+// directedMultiplexLocalMover is a step in graph modularity optimization.
+type directedMultiplexLocalMover struct {
+ g *ReducedDirectedMultiplex
+
+ // nodes is the set of working nodes.
+ nodes []graph.Node
+ // edgeWeightsOf is the weighted degree
+ // of each node indexed by ID.
+ edgeWeightsOf [][]directedWeights
+
+ // m is the total sum of
+ // edge weights in g.
+ m []float64
+
+ // weight is the weight function
+ // provided by g or a function
+ // that returns the Weight value
+ // of the non-nil edge between x
+ // and y.
+ weight []func(x, y graph.Node) float64
+
+ // communities is the current
+ // division of g.
+ communities [][]graph.Node
+ // memberships is a mapping between
+ // node ID and community membership.
+ memberships []int
+
+ // resolution is the Reichardt and
+ // Bornholdt γ parameter as defined
+ // in doi:10.1103/PhysRevE.74.016110.
+ resolutions []float64
+
+ // weights is the layer weights for
+ // the modularisation.
+ weights []float64
+
+ // searchAll specifies whether the local
+ // mover should consider non-connected
+ // communities during the local moving
+ // heuristic.
+ searchAll bool
+
+ // moved indicates that a call to
+ // move has been made since the last
+ // call to shuffle.
+ moved bool
+
+ // changed indicates that a move
+ // has been made since the creation
+ // of the local mover.
+ changed bool
+}
+
+// newDirectedMultiplexLocalMover returns a new directedMultiplexLocalMover initialized with
+// the graph g, a set of communities and a modularity resolution parameter. The
+// node IDs of g must be contiguous in [0,n) where n is the number of nodes.
+// If g has a zero edge weight sum, nil is returned.
+func newDirectedMultiplexLocalMover(g *ReducedDirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64, all bool) *directedMultiplexLocalMover {
+ nodes := g.Nodes()
+ l := directedMultiplexLocalMover{
+ g: g,
+ nodes: nodes,
+ edgeWeightsOf: make([][]directedWeights, g.Depth()),
+ m: make([]float64, g.Depth()),
+ communities: communities,
+ memberships: make([]int, len(nodes)),
+ resolutions: resolutions,
+ weights: weights,
+ weight: make([]func(x, y graph.Node) float64, g.Depth()),
+ }
+
+ // Calculate the total edge weight of the graph
+ // and degree weights for each node.
+ var zero int
+ for i := 0; i < g.Depth(); i++ {
+ l.edgeWeightsOf[i] = make([]directedWeights, len(nodes))
+ var weight func(x, y graph.Node) float64
+
+ if weights != nil {
+ if weights[i] == 0 {
+ zero++
+ continue
+ }
+ if weights[i] < 0 {
+ weight = negativeWeightFuncFor(g.Layer(i))
+ l.searchAll = all
+ } else {
+ weight = positiveWeightFuncFor(g.Layer(i))
+ }
+ } else {
+ weight = positiveWeightFuncFor(g.Layer(i))
+ }
+
+ l.weight[i] = weight
+ layer := g.Layer(i)
+ for _, n := range l.nodes {
+ u := n
+ var wOut float64
+ for _, v := range layer.From(u) {
+ wOut += weight(u, v)
+ }
+
+ v := n
+ var wIn float64
+ for _, u := range layer.To(v) {
+ wIn += weight(u, v)
+ }
+
+ w := weight(n, n)
+ l.edgeWeightsOf[i][u.ID()] = directedWeights{out: w + wOut, in: w + wIn}
+ l.m[i] += w + wOut
+ }
+ if l.m[i] == 0 {
+ zero++
+ }
+ }
+ if zero == g.Depth() {
+ return nil
+ }
+
+ // Assign membership mappings.
+ for i, c := range communities {
+ for _, n := range c {
+ l.memberships[n.ID()] = i
+ }
+ }
+
+ return &l
+}
+
+// localMovingHeuristic performs the Louvain local moving heuristic until
+// no further moves can be made. It returns a boolean indicating that the
+// directedMultiplexLocalMover has not made any improvement to the community
+// structure and so the Louvain algorithm is done.
+func (l *directedMultiplexLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) {
+ for {
+ l.shuffle(rnd)
+ for _, n := range l.nodes {
+ dQ, dst, src := l.deltaQ(n)
+ if dQ <= 0 {
+ continue
+ }
+ l.move(dst, src)
+ }
+ if !l.moved {
+ return !l.changed
+ }
+ }
+}
+
+// shuffle performs a Fisher-Yates shuffle on the nodes held by the
+// directedMultiplexLocalMover using the random source rnd which should return
+// an integer in the range [0,n).
+func (l *directedMultiplexLocalMover) shuffle(rnd func(n int) int) {
+ l.moved = false
+ for i := range l.nodes[:len(l.nodes)-1] {
+ j := i + rnd(len(l.nodes)-i)
+ l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i]
+ }
+}
+
+// move moves the node at src to the community at dst.
+func (l *directedMultiplexLocalMover) move(dst int, src commIdx) {
+ l.moved = true
+ l.changed = true
+
+ srcComm := l.communities[src.community]
+ n := srcComm[src.node]
+
+ l.memberships[n.ID()] = dst
+
+ l.communities[dst] = append(l.communities[dst], n)
+ srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil
+ l.communities[src.community] = srcComm[:len(srcComm)-1]
+}
+
+// deltaQ returns the highest gain in modularity attainable by moving
+// n from its current community to another connected community and
+// the index of the chosen destination. The index into the
+// directedMultiplexLocalMover's communities field is returned in src if n
+// is in communities.
+func (l *directedMultiplexLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) {
+ id := n.ID()
+
+ var iterator minTaker
+ if l.searchAll {
+ iterator = &dense{n: len(l.communities)}
+ } else {
+ // Find communities connected to n.
+ var connected intsets.Sparse
+ // The following for loop is equivalent to:
+ //
+ // for i := 0; i < l.g.Depth(); i++ {
+ // for _, v := range l.g.Layer(i).From(n) {
+ // connected.Insert(l.memberships[v.ID()])
+ // }
+ // for _, v := range l.g.Layer(i).To(n) {
+ // connected.Insert(l.memberships[v.ID()])
+ // }
+ // }
+ //
+ // This is done to avoid an allocation for
+ // each layer.
+ for _, layer := range l.g.layers {
+ for _, vid := range layer.edgesFrom[id] {
+ connected.Insert(l.memberships[vid])
+ }
+ for _, vid := range layer.edgesTo[id] {
+ connected.Insert(l.memberships[vid])
+ }
+ }
+ // Insert the node's own community.
+ connected.Insert(l.memberships[id])
+ iterator = &connected
+ }
+
+ // Calculate the highest modularity gain
+ // from moving into another community and
+ // keep the index of that community.
+ var dQremove float64
+ dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1}
+ var i int
+ for iterator.TakeMin(&i) {
+ c := l.communities[i]
+ var removal bool
+ var _dQadd float64
+ for layer := 0; layer < l.g.Depth(); layer++ {
+ m := l.m[layer]
+ if m == 0 {
+ // Do not consider layers with zero sum edge weight.
+ continue
+ }
+ w := 1.0
+ if l.weights != nil {
+ w = l.weights[layer]
+ }
+ if w == 0 {
+ // Do not consider layers with zero weighting.
+ continue
+ }
+
+ var k_aC, sigma_totC directedWeights // C is a substitution for ^𝛼 or ^𝛽.
+ removal = false
+ for j, u := range c {
+ uid := u.ID()
+ if uid == id {
+ // Only mark and check src community on the first layer.
+ if layer == 0 {
+ if src.community != -1 {
+ panic("community: multiple sources")
+ }
+ src = commIdx{i, j}
+ }
+ removal = true
+ }
+
+ k_aC.in += l.weight[layer](n, u)
+ k_aC.out += l.weight[layer](u, n)
+ // sigma_totC could be kept for each community
+ // and updated for moves, changing the calculation
+ // of sigma_totC here from O(n_c) to O(1), but
+ // in practice the time savings do not appear
+ // to be compelling and do not make up for the
+ // increase in code complexity and space required.
+ w := l.edgeWeightsOf[layer][uid]
+ sigma_totC.in += w.in
+ sigma_totC.out += w.out
+ }
+
+ a_aa := l.weight[layer](n, n)
+ k_a := l.edgeWeightsOf[layer][id]
+ gamma := 1.0
+ if l.resolutions != nil {
+ if len(l.resolutions) == 1 {
+ gamma = l.resolutions[0]
+ } else {
+ gamma = l.resolutions[layer]
+ }
+ }
+
+ // See louvain.tex for a derivation of these equations.
+ // The weighting term, w, is described in V Traag,
+ // "Algorithms and dynamical models for communities and
+ // reputation in social networks", chapter 5.
+ // http://www.traag.net/wp/wp-content/papercite-data/pdf/traag_algorithms_2013.pdf
+ switch {
+ case removal:
+ // The community c was the current community,
+ // so calculate the change due to removal.
+ dQremove += w * ((k_aC.in /*^𝛼*/ - a_aa) + (k_aC.out /*^𝛼*/ - a_aa) -
+ gamma*(k_a.in*(sigma_totC.out /*^𝛼*/ -k_a.out)+k_a.out*(sigma_totC.in /*^𝛼*/ -k_a.in))/m)
+
+ default:
+ // Otherwise calculate the change due to an addition
+ // to c.
+ _dQadd += w * (k_aC.in /*^𝛽*/ + k_aC.out /*^𝛽*/ -
+ gamma*(k_a.in*sigma_totC.out /*^𝛽*/ +k_a.out*sigma_totC.in /*^𝛽*/)/m)
+ }
+ }
+ if !removal && _dQadd > dQadd {
+ dQadd = _dQadd
+ dst = i
+ }
+ }
+
+ return dQadd - dQremove, dst, src
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_directed_multiplex_test.go b/vendor/github.com/gonum/graph/community/louvain_directed_multiplex_test.go
new file mode 100644
index 000000000000..1e4e4fe51771
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_directed_multiplex_test.go
@@ -0,0 +1,700 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "math"
+ "math/rand"
+ "reflect"
+ "sort"
+ "testing"
+
+ "github.com/gonum/floats"
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+ "github.com/gonum/graph/simple"
+)
+
+var communityDirectedMultiplexQTests = []struct {
+ name string
+ layers []layer
+ structures []structure
+
+ wantLevels []level
+}{
+ {
+ name: "unconnected",
+ layers: []layer{{g: unconnected, weight: 1}},
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0),
+ 1: linksTo(1),
+ 2: linksTo(2),
+ 3: linksTo(3),
+ 4: linksTo(4),
+ 5: linksTo(5),
+ },
+ want: math.NaN(),
+ },
+ },
+ wantLevels: []level{
+ {
+ q: math.Inf(-1), // Here math.Inf(-1) is used as a place holder for NaN to allow use of reflect.DeepEqual.
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "simple_directed",
+ layers: []layer{{g: simpleDirected, weight: 1}},
+ // community structure and modularity calculated by C++ implementation: louvain igraph.
+ // Note that louvain igraph returns Q as an unscaled value.
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1),
+ 1: linksTo(2, 3, 4),
+ },
+ want: 0.5714285714285716,
+ tol: 1e-10,
+ },
+ },
+ wantLevels: []level{
+ {
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1)},
+ {simple.Node(2), simple.Node(3), simple.Node(4)},
+ },
+ q: 0.5714285714285716,
+ },
+ {
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ },
+ q: -1.2857142857142856,
+ },
+ },
+ },
+ {
+ name: "simple_directed_twice",
+ layers: []layer{
+ {g: simpleDirected, weight: 0.5},
+ {g: simpleDirected, weight: 0.5},
+ },
+ // community structure and modularity calculated by C++ implementation: louvain igraph.
+ // Note that louvain igraph returns Q as an unscaled value.
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1),
+ 1: linksTo(2, 3, 4),
+ },
+ want: 0.5714285714285716,
+ tol: 1e-10,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 0.5714285714285716,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1)},
+ {simple.Node(2), simple.Node(3), simple.Node(4)},
+ },
+ },
+ {
+ q: -1.2857142857142856,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ },
+ },
+ },
+ },
+ {
+ name: "small_dumbell",
+ layers: []layer{
+ {g: smallDumbell, edgeWeight: 1, weight: 1},
+ {g: dumbellRepulsion, edgeWeight: -1, weight: -1},
+ },
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2),
+ 1: linksTo(3, 4, 5),
+ },
+ want: 2.5714285714285716, tol: 1e-10,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5),
+ },
+ want: 0, tol: 1e-14,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 2.5714285714285716,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2)},
+ {simple.Node(3), simple.Node(4), simple.Node(5)},
+ },
+ },
+ {
+ q: -0.857142857142857,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "repulsion",
+ layers: []layer{{g: repulsion, edgeWeight: -1, weight: -1}},
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2),
+ 1: linksTo(3, 4, 5),
+ },
+ want: 9.0, tol: 1e-10,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0),
+ 1: linksTo(1),
+ 2: linksTo(2),
+ 3: linksTo(3),
+ 4: linksTo(4),
+ 5: linksTo(5),
+ },
+ want: 3, tol: 1e-14,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 9.0,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2)},
+ {simple.Node(3), simple.Node(4), simple.Node(5)},
+ },
+ },
+ {
+ q: 3.0,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "middle_east",
+ layers: []layer{
+ {g: middleEast.friends, edgeWeight: 1, weight: 1},
+ {g: middleEast.enemies, edgeWeight: -1, weight: -1},
+ },
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 6),
+ 1: linksTo(1, 7, 9, 12),
+ 2: linksTo(2, 8, 11),
+ 3: linksTo(3, 4, 5, 10),
+ },
+ want: 33.818057455540355, tol: 1e-9,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 2, 3, 4, 5, 10),
+ 1: linksTo(1, 7, 9, 12),
+ 2: linksTo(6),
+ 3: linksTo(8, 11),
+ },
+ want: 30.92749658, tol: 1e-7,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12),
+ },
+ want: 0, tol: 1e-14,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 33.818057455540355,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(6)},
+ {simple.Node(1), simple.Node(7), simple.Node(9), simple.Node(12)},
+ {simple.Node(2), simple.Node(8), simple.Node(11)},
+ {simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(10)},
+ },
+ },
+ {
+ q: 3.8071135430916545,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ {simple.Node(6)},
+ {simple.Node(7)},
+ {simple.Node(8)},
+ {simple.Node(9)},
+ {simple.Node(10)},
+ {simple.Node(11)},
+ {simple.Node(12)},
+ },
+ },
+ },
+ },
+}
+
+func TestCommunityQDirectedMultiplex(t *testing.T) {
+ for _, test := range communityDirectedMultiplexQTests {
+ g, weights, err := directedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ for _, structure := range test.structures {
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ }
+ q := QMultiplex(g, communities, weights, []float64{structure.resolution})
+ got := floats.Sum(q)
+ if !floats.EqualWithinAbsOrRel(got, structure.want, structure.tol, structure.tol) && !math.IsNaN(structure.want) {
+ for _, c := range communities {
+ sort.Sort(ordered.ByID(c))
+ }
+ t.Errorf("unexpected Q value for %q %v: got: %v %.3v want: %v",
+ test.name, communities, got, q, structure.want)
+ }
+ }
+ }
+}
+
+func TestCommunityDeltaQDirectedMultiplex(t *testing.T) {
+tests:
+ for _, test := range communityDirectedMultiplexQTests {
+ g, weights, err := directedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ rnd := rand.New(rand.NewSource(1)).Intn
+ for _, structure := range test.structures {
+ communityOf := make(map[int]int)
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communityOf[n] = i
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+ resolution := []float64{structure.resolution}
+
+ before := QMultiplex(g, communities, weights, resolution)
+
+ // We test exhaustively.
+ const all = true
+
+ l := newDirectedMultiplexLocalMover(
+ reduceDirectedMultiplex(g, nil, weights),
+ communities, weights, resolution, all)
+ if l == nil {
+ if !math.IsNaN(floats.Sum(before)) {
+ t.Errorf("unexpected nil localMover with non-NaN Q graph: Q=%.4v", before)
+ }
+ continue tests
+ }
+
+ // This is done to avoid run-to-run
+ // variation due to map iteration order.
+ sort.Sort(ordered.ByID(l.nodes))
+
+ l.shuffle(rnd)
+
+ for _, target := range l.nodes {
+ got, gotDst, gotSrc := l.deltaQ(target)
+
+ want, wantDst := math.Inf(-1), -1
+ migrated := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ if n == target.ID() {
+ continue
+ }
+ migrated[i] = append(migrated[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(migrated[i]))
+ }
+
+ for i, c := range structure.memberships {
+ if i == communityOf[target.ID()] {
+ continue
+ }
+ if !(all && hasNegative(weights)) {
+ connected := false
+ search:
+ for l := 0; l < g.Depth(); l++ {
+ if weights[l] < 0 {
+ connected = true
+ break search
+ }
+ layer := g.Layer(l)
+ for n := range c {
+ if layer.HasEdgeBetween(simple.Node(n), target) {
+ connected = true
+ break search
+ }
+ }
+ }
+ if !connected {
+ continue
+ }
+ }
+ migrated[i] = append(migrated[i], target)
+ after := QMultiplex(g, migrated, weights, resolution)
+ migrated[i] = migrated[i][:len(migrated[i])-1]
+ if delta := floats.Sum(after) - floats.Sum(before); delta > want {
+ want = delta
+ wantDst = i
+ }
+ }
+
+ if !floats.EqualWithinAbsOrRel(got, want, structure.tol, structure.tol) || gotDst != wantDst {
+ t.Errorf("unexpected result moving n=%d in c=%d of %s/%.4v: got: %.4v,%d want: %.4v,%d"+
+ "\n\t%v\n\t%v",
+ target.ID(), communityOf[target.ID()], test.name, structure.resolution, got, gotDst, want, wantDst,
+ communities, migrated)
+ }
+			if gotSrc.community != communityOf[target.ID()] {
+				t.Errorf("unexpected source community index: got: %d want: %d", gotSrc.community, communityOf[target.ID()])
+			} else if communities[gotSrc.community][gotSrc.node].ID() != target.ID() {
+				wantNodeIdx := -1
+				for i, n := range communities[gotSrc.community] {
+					if n.ID() == target.ID() {
+						wantNodeIdx = i
+						break
+					}
+				}
+				t.Errorf("unexpected source node index: got: %d want: %d", gotSrc.node, wantNodeIdx)
+			}
+ }
+ }
+ }
+}
+
+func TestReduceQConsistencyDirectedMultiplex(t *testing.T) {
+tests:
+ for _, test := range communityDirectedMultiplexQTests {
+ g, weights, err := directedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ for _, structure := range test.structures {
+ if math.IsNaN(structure.want) {
+ continue tests
+ }
+
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ gQ := QMultiplex(g, communities, weights, []float64{structure.resolution})
+ gQnull := QMultiplex(g, nil, weights, nil)
+
+ cg0 := reduceDirectedMultiplex(g, nil, weights)
+ cg0Qnull := QMultiplex(cg0, cg0.Structure(), weights, nil)
+ if !floats.EqualWithinAbsOrRel(floats.Sum(gQnull), floats.Sum(cg0Qnull), structure.tol, structure.tol) {
+ t.Errorf("disagreement between null Q from method: %v and function: %v", cg0Qnull, gQnull)
+ }
+ cg0Q := QMultiplex(cg0, communities, weights, []float64{structure.resolution})
+ if !floats.EqualWithinAbsOrRel(floats.Sum(gQ), floats.Sum(cg0Q), structure.tol, structure.tol) {
+ t.Errorf("unexpected Q result after initial reduction: got: %v want :%v", cg0Q, gQ)
+ }
+
+ cg1 := reduceDirectedMultiplex(cg0, communities, weights)
+ cg1Q := QMultiplex(cg1, cg1.Structure(), weights, []float64{structure.resolution})
+ if !floats.EqualWithinAbsOrRel(floats.Sum(gQ), floats.Sum(cg1Q), structure.tol, structure.tol) {
+ t.Errorf("unexpected Q result after second reduction: got: %v want :%v", cg1Q, gQ)
+ }
+ }
+ }
+}
+
+var localDirectedMultiplexMoveTests = []struct {
+ name string
+ layers []layer
+ structures []moveStructures
+}{
+ {
+ name: "blondel",
+ layers: []layer{{g: blondel, weight: 1}, {g: blondel, weight: 0.5}},
+ structures: []moveStructures{
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ targetNodes: []graph.Node{simple.Node(0)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ targetNodes: []graph.Node{simple.Node(3)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ // Case to demonstrate when A_aa != k_a^𝛼.
+ targetNodes: []graph.Node{simple.Node(3), simple.Node(2)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ },
+ },
+}
+
+func TestMoveLocalDirectedMultiplex(t *testing.T) {
+ for _, test := range localDirectedMultiplexMoveTests {
+ g, weights, err := directedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ for _, structure := range test.structures {
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ r := reduceDirectedMultiplex(reduceDirectedMultiplex(g, nil, weights), communities, weights)
+
+ l := newDirectedMultiplexLocalMover(r, r.communities, weights, []float64{structure.resolution}, true)
+ for _, n := range structure.targetNodes {
+ dQ, dst, src := l.deltaQ(n)
+ if dQ > 0 {
+ before := floats.Sum(QMultiplex(r, l.communities, weights, []float64{structure.resolution}))
+ l.move(dst, src)
+ after := floats.Sum(QMultiplex(r, l.communities, weights, []float64{structure.resolution}))
+ want := after - before
+ if !floats.EqualWithinAbsOrRel(dQ, want, structure.tol, structure.tol) {
+ t.Errorf("unexpected deltaQ: got: %v want: %v", dQ, want)
+ }
+ }
+ }
+ }
+ }
+}
+
+func TestLouvainDirectedMultiplex(t *testing.T) {
+ const louvainIterations = 20
+
+ for _, test := range communityDirectedMultiplexQTests {
+ g, weights, err := directedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ if test.structures[0].resolution != 1 {
+ panic("bad test: expect resolution=1")
+ }
+ want := make([][]graph.Node, len(test.structures[0].memberships))
+ for i, c := range test.structures[0].memberships {
+ for n := range c {
+ want[i] = append(want[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(want[i]))
+ }
+ sort.Sort(ordered.BySliceIDs(want))
+
+ var (
+ got *ReducedDirectedMultiplex
+ bestQ = math.Inf(-1)
+ )
+ // Modularize is randomised so we do this to
+ // ensure the level tests are consistent.
+ src := rand.New(rand.NewSource(1))
+ for i := 0; i < louvainIterations; i++ {
+ r := ModularizeMultiplex(g, weights, nil, true, src).(*ReducedDirectedMultiplex)
+ if q := floats.Sum(QMultiplex(r, nil, weights, nil)); q > bestQ || math.IsNaN(q) {
+ bestQ = q
+ got = r
+
+ if math.IsNaN(q) {
+ // Don't try again for non-connected case.
+ break
+ }
+ }
+
+ var qs []float64
+ for p := r; p != nil; p = p.Expanded().(*ReducedDirectedMultiplex) {
+ qs = append(qs, floats.Sum(QMultiplex(p, nil, weights, nil)))
+ }
+
+ // Recovery of Q values is reversed.
+ if reverse(qs); !sort.Float64sAreSorted(qs) {
+ t.Errorf("Q values not monotonically increasing: %.5v", qs)
+ }
+ }
+
+ gotCommunities := got.Communities()
+ for _, c := range gotCommunities {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(gotCommunities))
+ if !reflect.DeepEqual(gotCommunities, want) {
+ t.Errorf("unexpected community membership for %s Q=%.4v:\n\tgot: %v\n\twant:%v",
+ test.name, bestQ, gotCommunities, want)
+ continue
+ }
+
+ var levels []level
+ for p := got; p != nil; p = p.Expanded().(*ReducedDirectedMultiplex) {
+ var communities [][]graph.Node
+ if p.parent != nil {
+ communities = p.parent.Communities()
+ for _, c := range communities {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(communities))
+ } else {
+ communities = reduceDirectedMultiplex(g, nil, weights).Communities()
+ }
+ q := floats.Sum(QMultiplex(p, nil, weights, nil))
+ if math.IsNaN(q) {
+ // Use an equalable flag value in place of NaN.
+ q = math.Inf(-1)
+ }
+ levels = append(levels, level{q: q, communities: communities})
+ }
+ if !reflect.DeepEqual(levels, test.wantLevels) {
+ t.Errorf("unexpected level structure:\n\tgot: %v\n\twant:%v", levels, test.wantLevels)
+ }
+ }
+}
+
+func TestNonContiguousDirectedMultiplex(t *testing.T) {
+ g := simple.NewDirectedGraph(0, 0)
+ for _, e := range []simple.Edge{
+ {F: simple.Node(0), T: simple.Node(1), W: 1},
+ {F: simple.Node(4), T: simple.Node(5), W: 1},
+ } {
+ g.SetEdge(e)
+ }
+
+ func() {
+ defer func() {
+ r := recover()
+ if r != nil {
+ t.Error("unexpected panic with non-contiguous ID range")
+ }
+ }()
+ ModularizeMultiplex(DirectedLayers{g}, nil, nil, true, nil)
+ }()
+}
+
+func BenchmarkLouvainDirectedMultiplex(b *testing.B) {
+ src := rand.New(rand.NewSource(1))
+ for i := 0; i < b.N; i++ {
+ ModularizeMultiplex(DirectedLayers{dupGraphDirected}, nil, nil, true, src)
+ }
+}
+
+func directedMultiplexFrom(raw []layer) (DirectedLayers, []float64, error) {
+ var layers []graph.Directed
+ var weights []float64
+ for _, l := range raw {
+ g := simple.NewDirectedGraph(0, 0)
+ for u, e := range l.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ w := 1.0
+ if l.edgeWeight != 0 {
+ w = l.edgeWeight
+ }
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: w})
+ }
+ }
+ layers = append(layers, g)
+ weights = append(weights, l.weight)
+ }
+ g, err := NewDirectedLayers(layers...)
+ if err != nil {
+ return nil, nil, err
+ }
+ return g, weights, nil
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_directed_test.go b/vendor/github.com/gonum/graph/community/louvain_directed_test.go
new file mode 100644
index 000000000000..0b54618ff0f4
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_directed_test.go
@@ -0,0 +1,589 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "math"
+ "math/rand"
+ "reflect"
+ "sort"
+ "testing"
+
+ "github.com/gonum/floats"
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+ "github.com/gonum/graph/simple"
+)
+
+var communityDirectedQTests = []struct {
+ name string
+ g []set
+ structures []structure
+
+ wantLevels []level
+}{
+ {
+ name: "simple_directed",
+ g: simpleDirected,
+ // community structure and modularity calculated by C++ implementation: louvain igraph.
+ // Note that louvain igraph returns Q as an unscaled value.
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1),
+ 1: linksTo(2, 3, 4),
+ },
+ want: 0.5714285714285716 / 7,
+ tol: 1e-10,
+ },
+ },
+ wantLevels: []level{
+ {
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1)},
+ {simple.Node(2), simple.Node(3), simple.Node(4)},
+ },
+ q: 0.5714285714285716 / 7,
+ },
+ {
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ },
+ q: -1.2857142857142856 / 7,
+ },
+ },
+ },
+ {
+ name: "zachary",
+ g: zachary,
+ // community structure and modularity calculated by C++ implementation: louvain igraph.
+ // Note that louvain igraph returns Q as an unscaled value.
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 7, 11, 12, 13, 17, 19, 21),
+ 1: linksTo(4, 5, 6, 10, 16),
+ 2: linksTo(8, 9, 14, 15, 18, 20, 22, 26, 29, 30, 32, 33),
+ 3: linksTo(23, 24, 25, 27, 28, 31),
+ },
+ want: 34.3417721519 / 79 /* 5->6 and 6->5 because of co-equal rank */, tol: 1e-4,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 0.43470597660631316,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(7), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(17), simple.Node(19), simple.Node(21)},
+ {simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(10), simple.Node(16)},
+ {simple.Node(8), simple.Node(9), simple.Node(14), simple.Node(15), simple.Node(18), simple.Node(20), simple.Node(22), simple.Node(26), simple.Node(29), simple.Node(30), simple.Node(32), simple.Node(33)},
+ {simple.Node(23), simple.Node(24), simple.Node(25), simple.Node(27), simple.Node(28), simple.Node(31)},
+ },
+ },
+ {
+ q: 0.3911232174331037,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(7), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(17), simple.Node(19), simple.Node(21)},
+ {simple.Node(4), simple.Node(10)},
+ {simple.Node(5), simple.Node(6), simple.Node(16)},
+ {simple.Node(8), simple.Node(30)},
+ {simple.Node(9), simple.Node(14), simple.Node(15), simple.Node(18), simple.Node(20), simple.Node(22), simple.Node(32), simple.Node(33)},
+ {simple.Node(23), simple.Node(24), simple.Node(25), simple.Node(27), simple.Node(28), simple.Node(31)},
+ {simple.Node(26), simple.Node(29)},
+ },
+ },
+ {
+ q: -0.014580996635154624,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ {simple.Node(6)},
+ {simple.Node(7)},
+ {simple.Node(8)},
+ {simple.Node(9)},
+ {simple.Node(10)},
+ {simple.Node(11)},
+ {simple.Node(12)},
+ {simple.Node(13)},
+ {simple.Node(14)},
+ {simple.Node(15)},
+ {simple.Node(16)},
+ {simple.Node(17)},
+ {simple.Node(18)},
+ {simple.Node(19)},
+ {simple.Node(20)},
+ {simple.Node(21)},
+ {simple.Node(22)},
+ {simple.Node(23)},
+ {simple.Node(24)},
+ {simple.Node(25)},
+ {simple.Node(26)},
+ {simple.Node(27)},
+ {simple.Node(28)},
+ {simple.Node(29)},
+ {simple.Node(30)},
+ {simple.Node(31)},
+ {simple.Node(32)},
+ {simple.Node(33)},
+ },
+ },
+ },
+ },
+ {
+ name: "blondel",
+ g: blondel,
+ // community structure and modularity calculated by C++ implementation: louvain igraph.
+ // Note that louvain igraph returns Q as an unscaled value.
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7),
+ 1: linksTo(8, 9, 10, 11, 12, 13, 14, 15),
+ },
+ want: 11.1428571429 / 28, tol: 1e-4,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 0.3979591836734694,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(7)},
+ {simple.Node(8), simple.Node(9), simple.Node(10), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(14), simple.Node(15)},
+ },
+ },
+ {
+ q: 0.32525510204081637,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(3), simple.Node(5), simple.Node(7)},
+ {simple.Node(1), simple.Node(2), simple.Node(4), simple.Node(6)},
+ {simple.Node(8), simple.Node(10), simple.Node(11), simple.Node(13), simple.Node(15)},
+ {simple.Node(9), simple.Node(12), simple.Node(14)},
+ },
+ },
+ {
+ q: -0.022959183673469385,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ {simple.Node(6)},
+ {simple.Node(7)},
+ {simple.Node(8)},
+ {simple.Node(9)},
+ {simple.Node(10)},
+ {simple.Node(11)},
+ {simple.Node(12)},
+ {simple.Node(13)},
+ {simple.Node(14)},
+ {simple.Node(15)},
+ },
+ },
+ },
+ },
+}
+
+func TestCommunityQDirected(t *testing.T) {
+ for _, test := range communityDirectedQTests {
+ g := simple.NewDirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+ for _, structure := range test.structures {
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ }
+ got := Q(g, communities, structure.resolution)
+ if !floats.EqualWithinAbsOrRel(got, structure.want, structure.tol, structure.tol) && !math.IsNaN(structure.want) {
+ for _, c := range communities {
+ sort.Sort(ordered.ByID(c))
+ }
+ t.Errorf("unexpected Q value for %q %v: got: %v want: %v",
+ test.name, communities, got, structure.want)
+ }
+ }
+ }
+}
+
+func TestCommunityDeltaQDirected(t *testing.T) {
+tests:
+ for _, test := range communityDirectedQTests {
+ g := simple.NewDirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ rnd := rand.New(rand.NewSource(1)).Intn
+ for _, structure := range test.structures {
+ communityOf := make(map[int]int)
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communityOf[n] = i
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ before := Q(g, communities, structure.resolution)
+
+ l := newDirectedLocalMover(reduceDirected(g, nil), communities, structure.resolution)
+ if l == nil {
+ if !math.IsNaN(before) {
+ t.Errorf("unexpected nil localMover with non-NaN Q graph: Q=%.4v", before)
+ }
+ continue tests
+ }
+
+ // This is done to avoid run-to-run
+ // variation due to map iteration order.
+ sort.Sort(ordered.ByID(l.nodes))
+
+ l.shuffle(rnd)
+
+ for _, target := range l.nodes {
+ got, gotDst, gotSrc := l.deltaQ(target)
+
+ want, wantDst := math.Inf(-1), -1
+ migrated := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ if n == target.ID() {
+ continue
+ }
+ migrated[i] = append(migrated[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(migrated[i]))
+ }
+
+ for i, c := range structure.memberships {
+ if i == communityOf[target.ID()] {
+ continue
+ }
+ connected := false
+ for n := range c {
+ if g.HasEdgeBetween(simple.Node(n), target) {
+ connected = true
+ break
+ }
+ }
+ if !connected {
+ continue
+ }
+ migrated[i] = append(migrated[i], target)
+ after := Q(g, migrated, structure.resolution)
+ migrated[i] = migrated[i][:len(migrated[i])-1]
+ if after-before > want {
+ want = after - before
+ wantDst = i
+ }
+ }
+
+ if !floats.EqualWithinAbsOrRel(got, want, structure.tol, structure.tol) || gotDst != wantDst {
+ t.Errorf("unexpected result moving n=%d in c=%d of %s/%.4v: got: %.4v,%d want: %.4v,%d"+
+ "\n\t%v\n\t%v",
+ target.ID(), communityOf[target.ID()], test.name, structure.resolution, got, gotDst, want, wantDst,
+ communities, migrated)
+ }
+ if gotSrc.community != communityOf[target.ID()] {
+ t.Errorf("unexpected source community index: got: %d want: %d", gotSrc, communityOf[target.ID()])
+ } else if communities[gotSrc.community][gotSrc.node].ID() != target.ID() {
+ wantNodeIdx := -1
+ for i, n := range communities[gotSrc.community] {
+ if n.ID() == target.ID() {
+ wantNodeIdx = i
+ break
+ }
+ }
+ t.Errorf("unexpected source node index: got: %d want: %d", gotSrc.node, wantNodeIdx)
+ }
+ }
+ }
+ }
+}
+
+func TestReduceQConsistencyDirected(t *testing.T) {
+tests:
+ for _, test := range communityDirectedQTests {
+ g := simple.NewDirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ for _, structure := range test.structures {
+ if math.IsNaN(structure.want) {
+ continue tests
+ }
+
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ gQ := Q(g, communities, structure.resolution)
+ gQnull := Q(g, nil, 1)
+
+ cg0 := reduceDirected(g, nil)
+ cg0Qnull := Q(cg0, cg0.Structure(), 1)
+ if !floats.EqualWithinAbsOrRel(gQnull, cg0Qnull, structure.tol, structure.tol) {
+ t.Errorf("disagreement between null Q from method: %v and function: %v", cg0Qnull, gQnull)
+ }
+ cg0Q := Q(cg0, communities, structure.resolution)
+ if !floats.EqualWithinAbsOrRel(gQ, cg0Q, structure.tol, structure.tol) {
+ t.Errorf("unexpected Q result after initial reduction: got: %v want :%v", cg0Q, gQ)
+ }
+
+ cg1 := reduceDirected(cg0, communities)
+ cg1Q := Q(cg1, cg1.Structure(), structure.resolution)
+ if !floats.EqualWithinAbsOrRel(gQ, cg1Q, structure.tol, structure.tol) {
+ t.Errorf("unexpected Q result after second reduction: got: %v want :%v", cg1Q, gQ)
+ }
+ }
+ }
+}
+
+var localDirectedMoveTests = []struct {
+ name string
+ g []set
+ structures []moveStructures
+}{
+ {
+ name: "blondel",
+ g: blondel,
+ structures: []moveStructures{
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ targetNodes: []graph.Node{simple.Node(0)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ targetNodes: []graph.Node{simple.Node(3)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ // Case to demonstrate when A_aa != k_a^𝛼.
+ targetNodes: []graph.Node{simple.Node(3), simple.Node(2)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ },
+ },
+}
+
+func TestMoveLocalDirected(t *testing.T) {
+ for _, test := range localDirectedMoveTests {
+ g := simple.NewDirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ for _, structure := range test.structures {
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ r := reduceDirected(reduceDirected(g, nil), communities)
+
+ l := newDirectedLocalMover(r, r.communities, structure.resolution)
+ for _, n := range structure.targetNodes {
+ dQ, dst, src := l.deltaQ(n)
+ if dQ > 0 {
+ before := Q(r, l.communities, structure.resolution)
+ l.move(dst, src)
+ after := Q(r, l.communities, structure.resolution)
+ want := after - before
+ if !floats.EqualWithinAbsOrRel(dQ, want, structure.tol, structure.tol) {
+ t.Errorf("unexpected deltaQ: got: %v want: %v", dQ, want)
+ }
+ }
+ }
+ }
+ }
+}
+
+func TestModularizeDirected(t *testing.T) {
+ const louvainIterations = 20
+
+ for _, test := range communityDirectedQTests {
+ g := simple.NewDirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ if test.structures[0].resolution != 1 {
+ panic("bad test: expect resolution=1")
+ }
+ want := make([][]graph.Node, len(test.structures[0].memberships))
+ for i, c := range test.structures[0].memberships {
+ for n := range c {
+ want[i] = append(want[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(want[i]))
+ }
+ sort.Sort(ordered.BySliceIDs(want))
+
+ var (
+ got *ReducedDirected
+ bestQ = math.Inf(-1)
+ )
+ // Modularize is randomised so we do this to
+ // ensure the level tests are consistent.
+ src := rand.New(rand.NewSource(1))
+ for i := 0; i < louvainIterations; i++ {
+ r := Modularize(g, 1, src).(*ReducedDirected)
+ if q := Q(r, nil, 1); q > bestQ || math.IsNaN(q) {
+ bestQ = q
+ got = r
+
+ if math.IsNaN(q) {
+ // Don't try again for non-connected case.
+ break
+ }
+ }
+
+ var qs []float64
+ for p := r; p != nil; p = p.Expanded().(*ReducedDirected) {
+ qs = append(qs, Q(p, nil, 1))
+ }
+
+ // Recovery of Q values is reversed.
+ if reverse(qs); !sort.Float64sAreSorted(qs) {
+ t.Errorf("Q values not monotonically increasing: %.5v", qs)
+ }
+ }
+
+ gotCommunities := got.Communities()
+ for _, c := range gotCommunities {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(gotCommunities))
+ if !reflect.DeepEqual(gotCommunities, want) {
+ t.Errorf("unexpected community membership for %s Q=%.4v:\n\tgot: %v\n\twant:%v",
+ test.name, bestQ, gotCommunities, want)
+ continue
+ }
+
+ var levels []level
+ for p := got; p != nil; p = p.Expanded().(*ReducedDirected) {
+ var communities [][]graph.Node
+ if p.parent != nil {
+ communities = p.parent.Communities()
+ for _, c := range communities {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(communities))
+ } else {
+ communities = reduceDirected(g, nil).Communities()
+ }
+ q := Q(p, nil, 1)
+ if math.IsNaN(q) {
+ // Use an equalable flag value in place of NaN.
+ q = math.Inf(-1)
+ }
+ levels = append(levels, level{q: q, communities: communities})
+ }
+ if !reflect.DeepEqual(levels, test.wantLevels) {
+ t.Errorf("unexpected level structure:\n\tgot: %v\n\twant:%v", levels, test.wantLevels)
+ }
+ }
+}
+
+func TestNonContiguousDirected(t *testing.T) {
+ g := simple.NewDirectedGraph(0, 0)
+ for _, e := range []simple.Edge{
+ {F: simple.Node(0), T: simple.Node(1), W: 1},
+ {F: simple.Node(4), T: simple.Node(5), W: 1},
+ } {
+ g.SetEdge(e)
+ }
+
+ func() {
+ defer func() {
+ r := recover()
+ if r != nil {
+ t.Error("unexpected panic with non-contiguous ID range")
+ }
+ }()
+ Modularize(g, 1, nil)
+ }()
+}
+
+func BenchmarkLouvainDirected(b *testing.B) {
+ src := rand.New(rand.NewSource(1))
+ for i := 0; i < b.N; i++ {
+ Modularize(dupGraphDirected, 1, src)
+ }
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_test.go b/vendor/github.com/gonum/graph/community/louvain_test.go
new file mode 100644
index 000000000000..6abf3e91a25d
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_test.go
@@ -0,0 +1,277 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "fmt"
+ "math/rand"
+
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/graphs/gen"
+ "github.com/gonum/graph/simple"
+)
+
+// set is an integer set.
+type set map[int]struct{}
+
+func linksTo(i ...int) set {
+ if len(i) == 0 {
+ return nil
+ }
+ s := make(set)
+ for _, v := range i {
+ s[v] = struct{}{}
+ }
+ return s
+}
+
+type layer struct {
+ g []set
+ edgeWeight float64 // Zero edge weight is interpreted as 1.0.
+ weight float64
+}
+
+var (
+	unconnected = []set{ /* Nodes 0-4 are implicit. */ 5: nil}
+
+ smallDumbell = []set{
+ 0: linksTo(1, 2),
+ 1: linksTo(2),
+ 2: linksTo(3),
+ 3: linksTo(4, 5),
+ 4: linksTo(5),
+ 5: nil,
+ }
+ dumbellRepulsion = []set{
+ 0: linksTo(4),
+ 1: linksTo(5),
+ 2: nil,
+ 3: nil,
+ 4: nil,
+ 5: nil,
+ }
+
+ repulsion = []set{
+ 0: linksTo(3, 4, 5),
+ 1: linksTo(3, 4, 5),
+ 2: linksTo(3, 4, 5),
+ 3: linksTo(0, 1, 2),
+ 4: linksTo(0, 1, 2),
+ 5: linksTo(0, 1, 2),
+ }
+
+ simpleDirected = []set{
+ 0: linksTo(1),
+ 1: linksTo(0, 4),
+ 2: linksTo(1),
+ 3: linksTo(0, 4),
+ 4: linksTo(2),
+ }
+
+ // http://www.slate.com/blogs/the_world_/2014/07/17/the_middle_east_friendship_chart.html
+ middleEast = struct{ friends, complicated, enemies []set }{
+ // green cells
+ friends: []set{
+ 0: nil,
+ 1: linksTo(5, 7, 9, 12),
+ 2: linksTo(11),
+ 3: linksTo(4, 5, 10),
+ 4: linksTo(3, 5, 10),
+ 5: linksTo(1, 3, 4, 8, 10, 12),
+ 6: nil,
+ 7: linksTo(1, 12),
+ 8: linksTo(5, 9, 11),
+ 9: linksTo(1, 8, 12),
+ 10: linksTo(3, 4, 5),
+ 11: linksTo(2, 8),
+ 12: linksTo(1, 5, 7, 9),
+ },
+
+ // yellow cells
+ complicated: []set{
+ 0: linksTo(2, 4),
+ 1: linksTo(4, 8),
+ 2: linksTo(0, 3, 4, 5, 8, 9),
+ 3: linksTo(2, 8, 11),
+ 4: linksTo(0, 1, 2, 8),
+ 5: linksTo(2),
+ 6: nil,
+ 7: linksTo(9, 11),
+ 8: linksTo(1, 2, 3, 4, 10, 12),
+ 9: linksTo(2, 7, 11),
+ 10: linksTo(8),
+ 11: linksTo(3, 7, 9, 12),
+ 12: linksTo(8, 11),
+ },
+
+ // red cells
+ enemies: []set{
+ 0: linksTo(1, 3, 5, 6, 7, 8, 9, 10, 11, 12),
+ 1: linksTo(0, 2, 3, 6, 10, 11),
+ 2: linksTo(1, 6, 7, 10, 12),
+ 3: linksTo(0, 1, 6, 7, 9, 12),
+ 4: linksTo(6, 7, 9, 11, 12),
+ 5: linksTo(0, 6, 7, 9, 11),
+ 6: linksTo(0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12),
+ 7: linksTo(0, 2, 3, 4, 5, 6, 8, 10),
+ 8: linksTo(0, 6, 7),
+ 9: linksTo(0, 3, 4, 5, 6, 10),
+ 10: linksTo(0, 1, 2, 6, 7, 9, 11, 12),
+ 11: linksTo(0, 1, 4, 5, 6, 10),
+ 12: linksTo(0, 2, 3, 4, 6, 10),
+ },
+ }
+
+ // W. W. Zachary, An information flow model for conflict and fission in small groups,
+ // Journal of Anthropological Research 33, 452-473 (1977).
+ //
+ // The edge list here is constructed such that all link descriptions
+ // head from a node with lower Page Rank to a node with higher Page
+ // Rank. This has no impact on undirected tests, but allows a sensible
+ // view for directed tests.
+ zachary = []set{
+ 0: nil, // rank=0.097
+ 1: linksTo(0, 2), // rank=0.05288
+ 2: linksTo(0, 32), // rank=0.05708
+ 3: linksTo(0, 1, 2), // rank=0.03586
+ 4: linksTo(0, 6, 10), // rank=0.02198
+ 5: linksTo(0, 6), // rank=0.02911
+ 6: linksTo(0, 5), // rank=0.02911
+ 7: linksTo(0, 1, 2, 3), // rank=0.02449
+ 8: linksTo(0, 2, 32, 33), // rank=0.02977
+ 9: linksTo(2, 33), // rank=0.01431
+ 10: linksTo(0, 5), // rank=0.02198
+ 11: linksTo(0), // rank=0.009565
+ 12: linksTo(0, 3), // rank=0.01464
+ 13: linksTo(0, 1, 2, 3, 33), // rank=0.02954
+ 14: linksTo(32, 33), // rank=0.01454
+ 15: linksTo(32, 33), // rank=0.01454
+ 16: linksTo(5, 6), // rank=0.01678
+ 17: linksTo(0, 1), // rank=0.01456
+ 18: linksTo(32, 33), // rank=0.01454
+ 19: linksTo(0, 1, 33), // rank=0.0196
+ 20: linksTo(32, 33), // rank=0.01454
+ 21: linksTo(0, 1), // rank=0.01456
+ 22: linksTo(32, 33), // rank=0.01454
+ 23: linksTo(32, 33), // rank=0.03152
+ 24: linksTo(27, 31), // rank=0.02108
+ 25: linksTo(23, 24, 31), // rank=0.02101
+ 26: linksTo(29, 33), // rank=0.01504
+ 27: linksTo(2, 23, 33), // rank=0.02564
+ 28: linksTo(2, 31, 33), // rank=0.01957
+ 29: linksTo(23, 32, 33), // rank=0.02629
+ 30: linksTo(1, 8, 32, 33), // rank=0.02459
+ 31: linksTo(0, 32, 33), // rank=0.03716
+ 32: linksTo(33), // rank=0.07169
+ 33: nil, // rank=0.1009
+ }
+
+ // doi:10.1088/1742-5468/2008/10/P10008 figure 1
+ //
+ // The edge list here is constructed such that all link descriptions
+ // head from a node with lower Page Rank to a node with higher Page
+ // Rank. This has no impact on undirected tests, but allows a sensible
+ // view for directed tests.
+ blondel = []set{
+ 0: linksTo(2), // rank=0.06858
+ 1: linksTo(2, 4, 7), // rank=0.05264
+ 2: nil, // rank=0.08249
+ 3: linksTo(0, 7), // rank=0.03884
+ 4: linksTo(0, 2, 10), // rank=0.06754
+ 5: linksTo(0, 2, 7, 11), // rank=0.06738
+ 6: linksTo(2, 7, 11), // rank=0.0528
+ 7: nil, // rank=0.07008
+ 8: linksTo(10), // rank=0.09226
+ 9: linksTo(8), // rank=0.05821
+ 10: nil, // rank=0.1035
+ 11: linksTo(8, 10), // rank=0.08538
+ 12: linksTo(9, 10), // rank=0.04052
+ 13: linksTo(10, 11), // rank=0.03855
+ 14: linksTo(8, 9, 10), // rank=0.05621
+ 15: linksTo(8), // rank=0.02506
+ }
+)
+
+type structure struct {
+ resolution float64
+ memberships []set
+ want, tol float64
+}
+
+type level struct {
+ q float64
+ communities [][]graph.Node
+}
+
+type moveStructures struct {
+ memberships []set
+ targetNodes []graph.Node
+
+ resolution float64
+ tol float64
+}
+
+func reverse(f []float64) {
+ for i, j := 0, len(f)-1; i < j; i, j = i+1, j-1 {
+ f[i], f[j] = f[j], f[i]
+ }
+}
+
+func hasNegative(f []float64) bool {
+ for _, v := range f {
+ if v < 0 {
+ return true
+ }
+ }
+ return false
+}
+
+var (
+ dupGraph = simple.NewUndirectedGraph(0, 0)
+ dupGraphDirected = simple.NewDirectedGraph(0, 0)
+)
+
+func init() {
+ err := gen.Duplication(dupGraph, 1000, 0.8, 0.1, 0.5, rand.New(rand.NewSource(1)))
+ if err != nil {
+ panic(err)
+ }
+
+ // Construct a directed graph from dupGraph
+	// such that every edge in dupGraph is replaced
+ // with an edge that flows from the low node
+ // ID to the high node ID.
+ for _, e := range dupGraph.Edges() {
+ if e.To().ID() < e.From().ID() {
+ se := e.(simple.Edge)
+ se.F, se.T = se.T, se.F
+ e = se
+ }
+ dupGraphDirected.SetEdge(e)
+ }
+}
+
+// This init function checks the Middle East relationship data.
+func init() {
+ world := make([]set, len(middleEast.friends))
+ for i := range world {
+ world[i] = make(set)
+ }
+ for _, relationships := range [][]set{middleEast.friends, middleEast.complicated, middleEast.enemies} {
+ for i, rel := range relationships {
+ for inter := range rel {
+ if _, ok := world[i][inter]; ok {
+ panic(fmt.Sprintf("unexpected relationship: %v--%v", i, inter))
+ }
+ world[i][inter] = struct{}{}
+ }
+ }
+ }
+ for i := range world {
+ if len(world[i]) != len(middleEast.friends)-1 {
+ panic(fmt.Sprintf("missing relationship in %v: %v", i, world[i]))
+ }
+ }
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_undirected.go b/vendor/github.com/gonum/graph/community/louvain_undirected.go
new file mode 100644
index 000000000000..526cadafb9f1
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_undirected.go
@@ -0,0 +1,568 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "math"
+ "math/rand"
+ "sort"
+
+ "golang.org/x/tools/container/intsets"
+
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+)
+
+// qUndirected returns the modularity Q score of the graph g subdivided into the
+// given communities at the given resolution. If communities is nil, the
+// unclustered modularity score is returned. The resolution parameter
+// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110.
+// qUndirected will panic if g has any edge with negative edge weight.
+//
+// Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j)
+//
+// graph.Undirect may be used as a shim to allow calculation of Q for
+// directed graphs.
+func qUndirected(g graph.Undirected, communities [][]graph.Node, resolution float64) float64 {
+ nodes := g.Nodes()
+ weight := positiveWeightFuncFor(g)
+
+ // Calculate the total edge weight of the graph
+ // and the table of penetrating edge weight sums.
+ var m2 float64
+ k := make(map[int]float64, len(nodes))
+ for _, u := range nodes {
+ w := weight(u, u)
+ for _, v := range g.From(u) {
+ w += weight(u, v)
+ }
+ m2 += w
+ k[u.ID()] = w
+ }
+
+ if communities == nil {
+ var q float64
+ for _, u := range nodes {
+ kU := k[u.ID()]
+ q += weight(u, u) - resolution*kU*kU/m2
+ }
+ return q / m2
+ }
+
+ // Iterate over the communities, calculating
+ // the non-self edge weights for the upper
+ // triangle and adjust the diagonal.
+ var q float64
+ for _, c := range communities {
+ for i, u := range c {
+ kU := k[u.ID()]
+ q += weight(u, u) - resolution*kU*kU/m2
+ for _, v := range c[i+1:] {
+ q += 2 * (weight(u, v) - resolution*kU*k[v.ID()]/m2)
+ }
+ }
+ }
+ return q / m2
+}
+
+// louvainUndirected returns the hierarchical modularization of g at the given
+// resolution using the Louvain algorithm. If src is nil, rand.Intn is used as
+// the random generator. louvainUndirected will panic if g has any edge with negative edge
+// weight.
+//
+// graph.Undirect may be used as a shim to allow modularization of directed graphs.
+func louvainUndirected(g graph.Undirected, resolution float64, src *rand.Rand) *ReducedUndirected {
+ // See louvain.tex for a detailed description
+ // of the algorithm used here.
+
+ c := reduceUndirected(g, nil)
+ rnd := rand.Intn
+ if src != nil {
+ rnd = src.Intn
+ }
+ for {
+ l := newUndirectedLocalMover(c, c.communities, resolution)
+ if l == nil {
+ return c
+ }
+ if done := l.localMovingHeuristic(rnd); done {
+ return c
+ }
+ c = reduceUndirected(c, l.communities)
+ }
+}
+
+// ReducedUndirected is an undirected graph of communities derived from a
+// parent graph by reduction.
+type ReducedUndirected struct {
+ // nodes is the set of nodes held
+ // by the graph. In a ReducedUndirected
+ // the node ID is the index into
+ // nodes.
+ nodes []community
+ undirectedEdges
+
+ // communities is the community
+ // structure of the graph.
+ communities [][]graph.Node
+
+ parent *ReducedUndirected
+}
+
+var (
+ _ graph.Undirected = (*ReducedUndirected)(nil)
+ _ graph.Weighter = (*ReducedUndirected)(nil)
+ _ ReducedGraph = (*ReducedUndirected)(nil)
+)
+
+// Communities returns the community memberships of the nodes in the
+// graph used to generate the reduced graph.
+func (g *ReducedUndirected) Communities() [][]graph.Node {
+ communities := make([][]graph.Node, len(g.communities))
+ if g.parent == nil {
+ for i, members := range g.communities {
+ comm := make([]graph.Node, len(members))
+ for j, n := range members {
+ nodes := g.nodes[n.ID()].nodes
+ if len(nodes) != 1 {
+ panic("community: unexpected number of nodes in base graph community")
+ }
+ comm[j] = nodes[0]
+ }
+ communities[i] = comm
+ }
+ return communities
+ }
+ sub := g.parent.Communities()
+ for i, members := range g.communities {
+ var comm []graph.Node
+ for _, n := range members {
+ comm = append(comm, sub[n.ID()]...)
+ }
+ communities[i] = comm
+ }
+ return communities
+}
+
+// Structure returns the community structure of the current level of
+// the module clustering. The first index of the returned value
+// corresponds to the index of the nodes in the next higher level if
+// it exists. The returned value should not be mutated.
+func (g *ReducedUndirected) Structure() [][]graph.Node {
+ return g.communities
+}
+
+// Expanded returns the next lower level of the module clustering or nil
+// if at the lowest level.
+func (g *ReducedUndirected) Expanded() ReducedGraph {
+ return g.parent
+}
+
+// reduceUndirected returns a reduced graph constructed from g divided
+// into the given communities. The communities value is mutated
+// by the call to reduceUndirected. If communities is nil and g is a
+// ReducedUndirected, it is returned unaltered.
+func reduceUndirected(g graph.Undirected, communities [][]graph.Node) *ReducedUndirected {
+ if communities == nil {
+ if r, ok := g.(*ReducedUndirected); ok {
+ return r
+ }
+
+ nodes := g.Nodes()
+ // TODO(kortschak) This sort is necessary really only
+ // for testing. In practice we would not be using the
+ // community provided by the user for a Q calculation.
+ // Probably we should use a function to map the
+ // communities in the test sets to the remapped order.
+ sort.Sort(ordered.ByID(nodes))
+ communities = make([][]graph.Node, len(nodes))
+ for i := range nodes {
+ communities[i] = []graph.Node{node(i)}
+ }
+
+ weight := positiveWeightFuncFor(g)
+ r := ReducedUndirected{
+ nodes: make([]community, len(nodes)),
+ undirectedEdges: undirectedEdges{
+ edges: make([][]int, len(nodes)),
+ weights: make(map[[2]int]float64),
+ },
+ communities: communities,
+ }
+ communityOf := make(map[int]int, len(nodes))
+ for i, n := range nodes {
+ r.nodes[i] = community{id: i, nodes: []graph.Node{n}}
+ communityOf[n.ID()] = i
+ }
+ for _, u := range nodes {
+ var out []int
+ uid := communityOf[u.ID()]
+ for _, v := range g.From(u) {
+ vid := communityOf[v.ID()]
+ if vid != uid {
+ out = append(out, vid)
+ }
+ if uid < vid {
+ // Only store the weight once.
+ r.weights[[2]int{uid, vid}] = weight(u, v)
+ }
+ }
+ r.edges[uid] = out
+ }
+ return &r
+ }
+
+ // Remove zero length communities destructively.
+ var commNodes int
+ for i := 0; i < len(communities); {
+ comm := communities[i]
+ if len(comm) == 0 {
+ communities[i] = communities[len(communities)-1]
+ communities[len(communities)-1] = nil
+ communities = communities[:len(communities)-1]
+ } else {
+ commNodes += len(comm)
+ i++
+ }
+ }
+
+ r := ReducedUndirected{
+ nodes: make([]community, len(communities)),
+ undirectedEdges: undirectedEdges{
+ edges: make([][]int, len(communities)),
+ weights: make(map[[2]int]float64),
+ },
+ }
+ r.communities = make([][]graph.Node, len(communities))
+ for i := range r.communities {
+ r.communities[i] = []graph.Node{node(i)}
+ }
+ if g, ok := g.(*ReducedUndirected); ok {
+ // Make sure we retain the truncated
+ // community structure.
+ g.communities = communities
+ r.parent = g
+ }
+ weight := positiveWeightFuncFor(g)
+ communityOf := make(map[int]int, commNodes)
+ for i, comm := range communities {
+ r.nodes[i] = community{id: i, nodes: comm}
+ for _, n := range comm {
+ communityOf[n.ID()] = i
+ }
+ }
+ for uid, comm := range communities {
+ var out []int
+ for i, u := range comm {
+ r.nodes[uid].weight += weight(u, u)
+ for _, v := range comm[i+1:] {
+ r.nodes[uid].weight += 2 * weight(u, v)
+ }
+ for _, v := range g.From(u) {
+ vid := communityOf[v.ID()]
+ found := false
+ for _, e := range out {
+ if e == vid {
+ found = true
+ break
+ }
+ }
+ if !found && vid != uid {
+ out = append(out, vid)
+ }
+ if uid < vid {
+ // Only store the weight once.
+ r.weights[[2]int{uid, vid}] += weight(u, v)
+ }
+ }
+ }
+ r.edges[uid] = out
+ }
+ return &r
+}
+
+// Has returns whether the node exists within the graph.
+func (g *ReducedUndirected) Has(n graph.Node) bool {
+ id := n.ID()
+ return id >= 0 || id < len(g.nodes)
+}
+
+// Nodes returns all the nodes in the graph.
+func (g *ReducedUndirected) Nodes() []graph.Node {
+ nodes := make([]graph.Node, len(g.nodes))
+ for i := range g.nodes {
+ nodes[i] = node(i)
+ }
+ return nodes
+}
+
+// From returns all nodes in g that can be reached directly from u.
+func (g *ReducedUndirected) From(u graph.Node) []graph.Node {
+ out := g.edges[u.ID()]
+ nodes := make([]graph.Node, len(out))
+ for i, vid := range out {
+ nodes[i] = g.nodes[vid]
+ }
+ return nodes
+}
+
+// HasEdgeBetween returns whether an edge exists between nodes x and y.
+func (g *ReducedUndirected) HasEdgeBetween(x, y graph.Node) bool {
+ xid := x.ID()
+ yid := y.ID()
+ if xid == yid {
+ return false
+ }
+ if xid > yid {
+ xid, yid = yid, xid
+ }
+ _, ok := g.weights[[2]int{xid, yid}]
+ return ok
+}
+
+// Edge returns the edge from u to v if such an edge exists and nil otherwise.
+// The node v must be directly reachable from u as defined by the From method.
+func (g *ReducedUndirected) Edge(u, v graph.Node) graph.Edge {
+ uid := u.ID()
+ vid := v.ID()
+ if vid < uid {
+ uid, vid = vid, uid
+ }
+ w, ok := g.weights[[2]int{uid, vid}]
+ if !ok {
+ return nil
+ }
+ return edge{from: g.nodes[u.ID()], to: g.nodes[v.ID()], weight: w}
+}
+
+// EdgeBetween returns the edge between nodes x and y.
+func (g *ReducedUndirected) EdgeBetween(x, y graph.Node) graph.Edge {
+ return g.Edge(x, y)
+}
+
+// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge.
+// If x and y are the same node the internal node weight is returned. If there is no joining
+// edge between the two nodes the weight value returned is zero. Weight returns true if an edge
+// exists between x and y or if x and y have the same ID, false otherwise.
+func (g *ReducedUndirected) Weight(x, y graph.Node) (w float64, ok bool) {
+ xid := x.ID()
+ yid := y.ID()
+ if xid == yid {
+ return g.nodes[xid].weight, true
+ }
+ if xid > yid {
+ xid, yid = yid, xid
+ }
+ w, ok = g.weights[[2]int{xid, yid}]
+ return w, ok
+}
+
+// undirectedLocalMover is a step in graph modularity optimization.
+type undirectedLocalMover struct {
+ g *ReducedUndirected
+
+ // nodes is the set of working nodes.
+ nodes []graph.Node
+ // edgeWeightOf is the weighted degree
+ // of each node indexed by ID.
+ edgeWeightOf []float64
+
+ // m2 is the total sum of
+ // edge weights in g.
+ m2 float64
+
+ // weight is the weight function
+ // provided by g or a function
+ // that returns the Weight value
+ // of the non-nil edge between x
+ // and y.
+ weight func(x, y graph.Node) float64
+
+ // communities is the current
+ // division of g.
+ communities [][]graph.Node
+ // memberships is a mapping between
+ // node ID and community membership.
+ memberships []int
+
+ // resolution is the Reichardt and
+ // Bornholdt γ parameter as defined
+ // in doi:10.1103/PhysRevE.74.016110.
+ resolution float64
+
+ // moved indicates that a call to
+ // move has been made since the last
+ // call to shuffle.
+ moved bool
+
+ // changed indicates that a move
+ // has been made since the creation
+ // of the local mover.
+ changed bool
+}
+
+// newUndirectedLocalMover returns a new undirectedLocalMover initialized with
+// the graph g, a set of communities and a modularity resolution parameter. The
+// node IDs of g must be contiguous in [0,n) where n is the number of nodes.
+// If g has a zero edge weight sum, nil is returned.
+func newUndirectedLocalMover(g *ReducedUndirected, communities [][]graph.Node, resolution float64) *undirectedLocalMover {
+ nodes := g.Nodes()
+ l := undirectedLocalMover{
+ g: g,
+ nodes: nodes,
+ edgeWeightOf: make([]float64, len(nodes)),
+ communities: communities,
+ memberships: make([]int, len(nodes)),
+ resolution: resolution,
+ weight: positiveWeightFuncFor(g),
+ }
+
+ // Calculate the total edge weight of the graph
+ // and degree weights for each node.
+ for _, u := range l.nodes {
+ w := l.weight(u, u)
+ for _, v := range g.From(u) {
+ w += l.weight(u, v)
+ }
+ l.edgeWeightOf[u.ID()] = w
+ l.m2 += w
+ }
+ if l.m2 == 0 {
+ return nil
+ }
+
+ // Assign membership mappings.
+ for i, c := range communities {
+ for _, u := range c {
+ l.memberships[u.ID()] = i
+ }
+ }
+
+ return &l
+}
+
+// localMovingHeuristic performs the Louvain local moving heuristic until
+// no further moves can be made. It returns a boolean indicating that the
+// undirectedLocalMover has not made any improvement to the community
+// structure and so the Louvain algorithm is done.
+func (l *undirectedLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) {
+ for {
+ l.shuffle(rnd)
+ for _, n := range l.nodes {
+ dQ, dst, src := l.deltaQ(n)
+ if dQ <= 0 {
+ continue
+ }
+ l.move(dst, src)
+ }
+ if !l.moved {
+ return !l.changed
+ }
+ }
+}
+
+// shuffle performs a Fisher-Yates shuffle on the nodes held by the
+// undirectedLocalMover using the random source rnd which should return
+// an integer in the range [0,n).
+func (l *undirectedLocalMover) shuffle(rnd func(n int) int) {
+ l.moved = false
+ for i := range l.nodes[:len(l.nodes)-1] {
+ j := i + rnd(len(l.nodes)-i)
+ l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i]
+ }
+}
+
+// move moves the node at src to the community at dst.
+func (l *undirectedLocalMover) move(dst int, src commIdx) {
+ l.moved = true
+ l.changed = true
+
+ srcComm := l.communities[src.community]
+ n := srcComm[src.node]
+
+ l.memberships[n.ID()] = dst
+
+ l.communities[dst] = append(l.communities[dst], n)
+ srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil
+ l.communities[src.community] = srcComm[:len(srcComm)-1]
+}
+
+// deltaQ returns the highest gain in modularity attainable by moving
+// n from its current community to another connected community and
+// the index of the chosen destination. The index into the
+// undirectedLocalMover's communities field is returned in src if n
+// is in communities.
+func (l *undirectedLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) {
+ id := n.ID()
+ a_aa := l.weight(n, n)
+ k_a := l.edgeWeightOf[id]
+ m2 := l.m2
+ gamma := l.resolution
+
+	// Find communities connected to n.
+ var connected intsets.Sparse
+ // The following for loop is equivalent to:
+ //
+ // for _, v := range l.g.From(n) {
+ // connected.Insert(l.memberships[v.ID()])
+ // }
+ //
+ // This is done to avoid an allocation.
+ for _, vid := range l.g.edges[id] {
+ connected.Insert(l.memberships[vid])
+ }
+ // Insert the node's own community.
+ connected.Insert(l.memberships[id])
+
+ // Calculate the highest modularity gain
+ // from moving into another community and
+ // keep the index of that community.
+ var dQremove float64
+ dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1}
+ var i int
+ for connected.TakeMin(&i) {
+ c := l.communities[i]
+ var k_aC, sigma_totC float64 // C is a substitution for ^𝛼 or ^𝛽.
+ var removal bool
+ for j, u := range c {
+ uid := u.ID()
+ if uid == id {
+ if src.community != -1 {
+ panic("community: multiple sources")
+ }
+ src = commIdx{i, j}
+ removal = true
+ }
+
+ k_aC += l.weight(n, u)
+ // sigma_totC could be kept for each community
+ // and updated for moves, changing the calculation
+ // of sigma_totC here from O(n_c) to O(1), but
+ // in practice the time savings do not appear
+ // to be compelling and do not make up for the
+ // increase in code complexity and space required.
+ sigma_totC += l.edgeWeightOf[uid]
+ }
+
+ // See louvain.tex for a derivation of these equations.
+ switch {
+ case removal:
+ // The community c was the current community,
+ // so calculate the change due to removal.
+ dQremove = k_aC /*^𝛼*/ - a_aa - gamma*k_a*(sigma_totC /*^𝛼*/ -k_a)/m2
+
+ default:
+ // Otherwise calculate the change due to an addition
+ // to c and retain if it is the current best.
+ dQ := k_aC /*^𝛽*/ - gamma*k_a*sigma_totC /*^𝛽*/ /m2
+ if dQ > dQadd {
+ dQadd = dQ
+ dst = i
+ }
+ }
+ }
+
+ return 2 * (dQadd - dQremove) / m2, dst, src
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_undirected_multiplex.go b/vendor/github.com/gonum/graph/community/louvain_undirected_multiplex.go
new file mode 100644
index 000000000000..355635e58542
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_undirected_multiplex.go
@@ -0,0 +1,811 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "fmt"
+ "math"
+ "math/rand"
+ "sort"
+
+ "golang.org/x/tools/container/intsets"
+
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+)
+
+// UndirectedMultiplex is an undirected multiplex graph.
+type UndirectedMultiplex interface {
+ Multiplex
+
+ // Layer returns the lth layer of the
+ // multiplex graph.
+ Layer(l int) graph.Undirected
+}
+
+// qUndirectedMultiplex returns the modularity Q score of the multiplex graph layers
+// subdivided into the given communities at the given resolutions and weights. Q is
+// returned as the vector of weighted Q scores for each layer of the multiplex graph.
+// If communities is nil, the unclustered modularity score is returned.
+// If weights is nil layers are equally weighted, otherwise the length of
+// weights must equal the number of layers. If resolutions is nil, a resolution
+// of 1.0 is used for all layers, otherwise either a single element slice may be used
+// to specify a global resolution, or the length of resolutions must equal the number
+// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt
+// doi:10.1103/PhysRevE.74.016110.
+// qUndirectedMultiplex will panic if the graph has any layer weight-scaled edge with
+// negative edge weight.
+//
+// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m ] \delta(c_i,c_j)
+//
+// Note that Q values for multiplex graphs are not scaled by the total layer edge weight.
+//
+// graph.Undirect may be used as a shim to allow calculation of Q for
+// directed graphs.
+func qUndirectedMultiplex(g UndirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 {
+ q := make([]float64, g.Depth())
+ nodes := g.Nodes()
+ layerWeight := 1.0
+ layerResolution := 1.0
+ if len(resolutions) == 1 {
+ layerResolution = resolutions[0]
+ }
+ for l := 0; l < g.Depth(); l++ {
+ layer := g.Layer(l)
+
+ if weights != nil {
+ layerWeight = weights[l]
+ }
+ if layerWeight == 0 {
+ continue
+ }
+
+ if len(resolutions) > 1 {
+ layerResolution = resolutions[l]
+ }
+
+ var weight func(x, y graph.Node) float64
+ if layerWeight < 0 {
+ weight = negativeWeightFuncFor(layer)
+ } else {
+ weight = positiveWeightFuncFor(layer)
+ }
+
+ // Calculate the total edge weight of the layer
+ // and the table of penetrating edge weight sums.
+ var m2 float64
+ k := make(map[int]float64, len(nodes))
+ for _, u := range nodes {
+ w := weight(u, u)
+ for _, v := range layer.From(u) {
+ w += weight(u, v)
+ }
+ m2 += w
+ k[u.ID()] = w
+ }
+
+ if communities == nil {
+ var qLayer float64
+ for _, u := range nodes {
+ kU := k[u.ID()]
+ qLayer += weight(u, u) - layerResolution*kU*kU/m2
+ }
+ q[l] = layerWeight * qLayer
+ continue
+ }
+
+ // Iterate over the communities, calculating
+ // the non-self edge weights for the upper
+ // triangle and adjust the diagonal.
+ var qLayer float64
+ for _, c := range communities {
+ for i, u := range c {
+ kU := k[u.ID()]
+ qLayer += weight(u, u) - layerResolution*kU*kU/m2
+ for _, v := range c[i+1:] {
+ qLayer += 2 * (weight(u, v) - layerResolution*kU*k[v.ID()]/m2)
+ }
+ }
+ }
+ q[l] = layerWeight * qLayer
+ }
+
+ return q
+}
+
+// UndirectedLayers implements UndirectedMultiplex.
+type UndirectedLayers []graph.Undirected
+
+// NewUndirectedLayers returns an UndirectedLayers using the provided layers
+// ensuring there is a match between IDs for each layer.
+func NewUndirectedLayers(layers ...graph.Undirected) (UndirectedLayers, error) {
+ if len(layers) == 0 {
+ return nil, nil
+ }
+ var base, next intsets.Sparse
+ for _, n := range layers[0].Nodes() {
+ base.Insert(n.ID())
+ }
+ for i, l := range layers[1:] {
+ next.Clear()
+ for _, n := range l.Nodes() {
+ next.Insert(n.ID())
+ }
+ if !next.Equals(&base) {
+ return nil, fmt.Errorf("community: layer ID mismatch between layers: %d", i+1)
+ }
+ }
+ return layers, nil
+}
+
+// Nodes returns the nodes of the receiver.
+func (g UndirectedLayers) Nodes() []graph.Node {
+ if len(g) == 0 {
+ return nil
+ }
+ return g[0].Nodes()
+}
+
+// Depth returns the depth of the multiplex graph.
+func (g UndirectedLayers) Depth() int { return len(g) }
+
+// Layer returns the lth layer of the multiplex graph.
+func (g UndirectedLayers) Layer(l int) graph.Undirected { return g[l] }
+
+// louvainUndirectedMultiplex returns the hierarchical modularization of g at the given resolution
+// using the Louvain algorithm. If all is true and g has negatively weighted layers, all
+// communities will be searched during the modularization. If src is nil, rand.Intn is
+// used as the random generator. louvainUndirectedMultiplex will panic if g has any edge with
+// edge weight that does not sign-match the layer weight.
+//
+// graph.Undirect may be used as a shim to allow modularization of directed graphs.
+func louvainUndirectedMultiplex(g UndirectedMultiplex, weights, resolutions []float64, all bool, src *rand.Rand) *ReducedUndirectedMultiplex {
+ if weights != nil && len(weights) != g.Depth() {
+ panic("community: weights vector length mismatch")
+ }
+ if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() {
+ panic("community: resolutions vector length mismatch")
+ }
+
+ // See louvain.tex for a detailed description
+ // of the algorithm used here.
+
+ c := reduceUndirectedMultiplex(g, nil, weights)
+ rnd := rand.Intn
+ if src != nil {
+ rnd = src.Intn
+ }
+ for {
+ l := newUndirectedMultiplexLocalMover(c, c.communities, weights, resolutions, all)
+ if l == nil {
+ return c
+ }
+ if done := l.localMovingHeuristic(rnd); done {
+ return c
+ }
+ c = reduceUndirectedMultiplex(c, l.communities, weights)
+ }
+}
+
+// ReducedUndirectedMultiplex is an undirected graph of communities derived from a
+// parent graph by reduction.
+type ReducedUndirectedMultiplex struct {
+ // nodes is the set of nodes held
+ // by the graph. In a ReducedUndirectedMultiplex
+ // the node ID is the index into
+ // nodes.
+ nodes []multiplexCommunity
+ layers []undirectedEdges
+
+ // communities is the community
+ // structure of the graph.
+ communities [][]graph.Node
+
+ parent *ReducedUndirectedMultiplex
+}
+
+var (
+ _ UndirectedMultiplex = (*ReducedUndirectedMultiplex)(nil)
+ _ graph.Undirected = (*undirectedLayerHandle)(nil)
+ _ graph.Weighter = (*undirectedLayerHandle)(nil)
+)
+
+// Nodes returns all the nodes in the graph.
+func (g *ReducedUndirectedMultiplex) Nodes() []graph.Node {
+ nodes := make([]graph.Node, len(g.nodes))
+ for i := range g.nodes {
+ nodes[i] = node(i)
+ }
+ return nodes
+}
+
+// Depth returns the number of layers in the multiplex graph.
+func (g *ReducedUndirectedMultiplex) Depth() int { return len(g.layers) }
+
+// Layer returns the lth layer of the multiplex graph.
+func (g *ReducedUndirectedMultiplex) Layer(l int) graph.Undirected {
+ return undirectedLayerHandle{multiplex: g, layer: l}
+}
+
+// Communities returns the community memberships of the nodes in the
+// graph used to generate the reduced graph.
+func (g *ReducedUndirectedMultiplex) Communities() [][]graph.Node {
+ communities := make([][]graph.Node, len(g.communities))
+ if g.parent == nil {
+ for i, members := range g.communities {
+ comm := make([]graph.Node, len(members))
+ for j, n := range members {
+ nodes := g.nodes[n.ID()].nodes
+ if len(nodes) != 1 {
+ panic("community: unexpected number of nodes in base graph community")
+ }
+ comm[j] = nodes[0]
+ }
+ communities[i] = comm
+ }
+ return communities
+ }
+ sub := g.parent.Communities()
+ for i, members := range g.communities {
+ var comm []graph.Node
+ for _, n := range members {
+ comm = append(comm, sub[n.ID()]...)
+ }
+ communities[i] = comm
+ }
+ return communities
+}
+
+// Structure returns the community structure of the current level of
+// the module clustering. The first index of the returned value
+// corresponds to the index of the nodes in the next higher level if
+// it exists. The returned value should not be mutated.
+func (g *ReducedUndirectedMultiplex) Structure() [][]graph.Node {
+ return g.communities
+}
+
+// Expanded returns the next lower level of the module clustering or nil
+// if at the lowest level.
+func (g *ReducedUndirectedMultiplex) Expanded() ReducedMultiplex {
+ return g.parent
+}
+
+// reduceUndirectedMultiplex returns a reduced graph constructed from g divided
+// into the given communities. The communities value is mutated
+// by the call to reduceUndirectedMultiplex. If communities is nil and g is a
+// ReducedUndirectedMultiplex, it is returned unaltered.
+func reduceUndirectedMultiplex(g UndirectedMultiplex, communities [][]graph.Node, weights []float64) *ReducedUndirectedMultiplex {
+ if communities == nil {
+ if r, ok := g.(*ReducedUndirectedMultiplex); ok {
+ return r
+ }
+
+ nodes := g.Nodes()
+ // TODO(kortschak) This sort is necessary really only
+ // for testing. In practice we would not be using the
+ // community provided by the user for a Q calculation.
+ // Probably we should use a function to map the
+ // communities in the test sets to the remapped order.
+ sort.Sort(ordered.ByID(nodes))
+ communities = make([][]graph.Node, len(nodes))
+ for i := range nodes {
+ communities[i] = []graph.Node{node(i)}
+ }
+
+ r := ReducedUndirectedMultiplex{
+ nodes: make([]multiplexCommunity, len(nodes)),
+ layers: make([]undirectedEdges, g.Depth()),
+ communities: communities,
+ }
+ communityOf := make(map[int]int, len(nodes))
+ for i, n := range nodes {
+ r.nodes[i] = multiplexCommunity{id: i, nodes: []graph.Node{n}, weights: make([]float64, depth(weights))}
+ communityOf[n.ID()] = i
+ }
+ for i := range r.layers {
+ r.layers[i] = undirectedEdges{
+ edges: make([][]int, len(nodes)),
+ weights: make(map[[2]int]float64),
+ }
+ }
+ w := 1.0
+ for l := 0; l < g.Depth(); l++ {
+ layer := g.Layer(l)
+ if weights != nil {
+ w = weights[l]
+ }
+ if w == 0 {
+ continue
+ }
+ var sign float64
+ var weight func(x, y graph.Node) float64
+ if w < 0 {
+ sign, weight = -1, negativeWeightFuncFor(layer)
+ } else {
+ sign, weight = 1, positiveWeightFuncFor(layer)
+ }
+ for _, u := range nodes {
+ var out []int
+ uid := communityOf[u.ID()]
+ for _, v := range layer.From(u) {
+ vid := communityOf[v.ID()]
+ if vid != uid {
+ out = append(out, vid)
+ }
+ if uid < vid {
+ // Only store the weight once.
+ r.layers[l].weights[[2]int{uid, vid}] = sign * weight(u, v)
+ }
+ }
+ r.layers[l].edges[uid] = out
+ }
+ }
+ return &r
+ }
+
+ // Remove zero length communities destructively.
+ var commNodes int
+ for i := 0; i < len(communities); {
+ comm := communities[i]
+ if len(comm) == 0 {
+ communities[i] = communities[len(communities)-1]
+ communities[len(communities)-1] = nil
+ communities = communities[:len(communities)-1]
+ } else {
+ commNodes += len(comm)
+ i++
+ }
+ }
+
+ r := ReducedUndirectedMultiplex{
+ nodes: make([]multiplexCommunity, len(communities)),
+ layers: make([]undirectedEdges, g.Depth()),
+ }
+ communityOf := make(map[int]int, commNodes)
+ for i, comm := range communities {
+ r.nodes[i] = multiplexCommunity{id: i, nodes: comm, weights: make([]float64, depth(weights))}
+ for _, n := range comm {
+ communityOf[n.ID()] = i
+ }
+ }
+ for i := range r.layers {
+ r.layers[i] = undirectedEdges{
+ edges: make([][]int, len(communities)),
+ weights: make(map[[2]int]float64),
+ }
+ }
+ r.communities = make([][]graph.Node, len(communities))
+ for i := range r.communities {
+ r.communities[i] = []graph.Node{node(i)}
+ }
+ if g, ok := g.(*ReducedUndirectedMultiplex); ok {
+ // Make sure we retain the truncated
+ // community structure.
+ g.communities = communities
+ r.parent = g
+ }
+ w := 1.0
+ for l := 0; l < g.Depth(); l++ {
+ layer := g.Layer(l)
+ if weights != nil {
+ w = weights[l]
+ }
+ if w == 0 {
+ continue
+ }
+ var sign float64
+ var weight func(x, y graph.Node) float64
+ if w < 0 {
+ sign, weight = -1, negativeWeightFuncFor(layer)
+ } else {
+ sign, weight = 1, positiveWeightFuncFor(layer)
+ }
+ for uid, comm := range communities {
+ var out []int
+ for i, u := range comm {
+ r.nodes[uid].weights[l] += sign * weight(u, u)
+ for _, v := range comm[i+1:] {
+ r.nodes[uid].weights[l] += 2 * sign * weight(u, v)
+ }
+ for _, v := range layer.From(u) {
+ vid := communityOf[v.ID()]
+ found := false
+ for _, e := range out {
+ if e == vid {
+ found = true
+ break
+ }
+ }
+ if !found && vid != uid {
+ out = append(out, vid)
+ }
+ if uid < vid {
+ // Only store the weight once.
+ r.layers[l].weights[[2]int{uid, vid}] += sign * weight(u, v)
+ }
+ }
+ }
+ r.layers[l].edges[uid] = out
+ }
+ }
+ return &r
+}
+
+// undirectedLayerHandle is a handle to a multiplex graph layer.
+type undirectedLayerHandle struct {
+ // multiplex is the complete
+ // multiplex graph.
+ multiplex *ReducedUndirectedMultiplex
+
+ // layer is an index into the
+ // multiplex for the current
+ // layer.
+ layer int
+}
+
+// Has returns whether the node exists within the graph.
+func (g undirectedLayerHandle) Has(n graph.Node) bool {
+	id := n.ID()
+	return id >= 0 && id < len(g.multiplex.nodes)
+}
+
+// Nodes returns all the nodes in the graph.
+func (g undirectedLayerHandle) Nodes() []graph.Node {
+ nodes := make([]graph.Node, len(g.multiplex.nodes))
+ for i := range g.multiplex.nodes {
+ nodes[i] = node(i)
+ }
+ return nodes
+}
+
+// From returns all nodes in g that can be reached directly from u.
+func (g undirectedLayerHandle) From(u graph.Node) []graph.Node {
+ out := g.multiplex.layers[g.layer].edges[u.ID()]
+ nodes := make([]graph.Node, len(out))
+ for i, vid := range out {
+ nodes[i] = g.multiplex.nodes[vid]
+ }
+ return nodes
+}
+
+// HasEdgeBetween returns whether an edge exists between nodes x and y.
+func (g undirectedLayerHandle) HasEdgeBetween(x, y graph.Node) bool {
+ xid := x.ID()
+ yid := y.ID()
+ if xid == yid {
+ return false
+ }
+ if xid > yid {
+ xid, yid = yid, xid
+ }
+ _, ok := g.multiplex.layers[g.layer].weights[[2]int{xid, yid}]
+ return ok
+}
+
+// Edge returns the edge from u to v if such an edge exists and nil otherwise.
+// The node v must be directly reachable from u as defined by the From method.
+func (g undirectedLayerHandle) Edge(u, v graph.Node) graph.Edge {
+ uid := u.ID()
+ vid := v.ID()
+ if vid < uid {
+ uid, vid = vid, uid
+ }
+ w, ok := g.multiplex.layers[g.layer].weights[[2]int{uid, vid}]
+ if !ok {
+ return nil
+ }
+ return multiplexEdge{from: g.multiplex.nodes[u.ID()], to: g.multiplex.nodes[v.ID()], weight: w}
+}
+
+// EdgeBetween returns the edge between nodes x and y.
+func (g undirectedLayerHandle) EdgeBetween(x, y graph.Node) graph.Edge {
+ return g.Edge(x, y)
+}
+
+// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge.
+// If x and y are the same node the internal node weight is returned. If there is no joining
+// edge between the two nodes the weight value returned is zero. Weight returns true if an edge
+// exists between x and y or if x and y have the same ID, false otherwise.
+func (g undirectedLayerHandle) Weight(x, y graph.Node) (w float64, ok bool) {
+ xid := x.ID()
+ yid := y.ID()
+ if xid == yid {
+ return g.multiplex.nodes[xid].weights[g.layer], true
+ }
+ if xid > yid {
+ xid, yid = yid, xid
+ }
+ w, ok = g.multiplex.layers[g.layer].weights[[2]int{xid, yid}]
+ return w, ok
+}
+
+// undirectedMultiplexLocalMover is a step in graph modularity optimization.
+type undirectedMultiplexLocalMover struct {
+ g *ReducedUndirectedMultiplex
+
+ // nodes is the set of working nodes.
+ nodes []graph.Node
+ // edgeWeightOf is the weighted degree
+ // of each node indexed by ID.
+ edgeWeightOf [][]float64
+
+ // m2 is the total sum of
+ // edge weights in g.
+ m2 []float64
+
+ // weight is the weight function
+ // provided by g or a function
+ // that returns the Weight value
+ // of the non-nil edge between x
+ // and y.
+ weight []func(x, y graph.Node) float64
+
+ // communities is the current
+ // division of g.
+ communities [][]graph.Node
+ // memberships is a mapping between
+ // node ID and community membership.
+ memberships []int
+
+ // resolution is the Reichardt and
+ // Bornholdt γ parameter as defined
+ // in doi:10.1103/PhysRevE.74.016110.
+ resolutions []float64
+
+ // weights is the layer weights for
+ // the modularisation.
+ weights []float64
+
+ // searchAll specifies whether the local
+ // mover should consider non-connected
+ // communities during the local moving
+ // heuristic.
+ searchAll bool
+
+ // moved indicates that a call to
+ // move has been made since the last
+ // call to shuffle.
+ moved bool
+
+ // changed indicates that a move
+ // has been made since the creation
+ // of the local mover.
+ changed bool
+}
+
+// newUndirectedMultiplexLocalMover returns a new undirectedMultiplexLocalMover initialized with
+// the graph g, a set of communities and a modularity resolution parameter. The
+// node IDs of g must be contiguous in [0,n) where n is the number of nodes.
+// If g has a zero edge weight sum, nil is returned.
+func newUndirectedMultiplexLocalMover(g *ReducedUndirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64, all bool) *undirectedMultiplexLocalMover {
+ nodes := g.Nodes()
+ l := undirectedMultiplexLocalMover{
+ g: g,
+ nodes: nodes,
+ edgeWeightOf: make([][]float64, g.Depth()),
+ m2: make([]float64, g.Depth()),
+ communities: communities,
+ memberships: make([]int, len(nodes)),
+ resolutions: resolutions,
+ weights: weights,
+ weight: make([]func(x, y graph.Node) float64, g.Depth()),
+ }
+
+ // Calculate the total edge weight of the graph
+ // and degree weights for each node.
+ var zero int
+ for i := 0; i < g.Depth(); i++ {
+ l.edgeWeightOf[i] = make([]float64, len(nodes))
+ var weight func(x, y graph.Node) float64
+
+ if weights != nil {
+ if weights[i] == 0 {
+ zero++
+ continue
+ }
+ if weights[i] < 0 {
+ weight = negativeWeightFuncFor(g.Layer(i))
+ l.searchAll = all
+ } else {
+ weight = positiveWeightFuncFor(g.Layer(i))
+ }
+ } else {
+ weight = positiveWeightFuncFor(g.Layer(i))
+ }
+
+ l.weight[i] = weight
+ layer := g.Layer(i)
+ for _, u := range l.nodes {
+ w := weight(u, u)
+ for _, v := range layer.From(u) {
+ w += weight(u, v)
+ }
+ l.edgeWeightOf[i][u.ID()] = w
+ l.m2[i] += w
+ }
+ if l.m2[i] == 0 {
+ zero++
+ }
+ }
+ if zero == g.Depth() {
+ return nil
+ }
+
+ // Assign membership mappings.
+ for i, c := range communities {
+ for _, u := range c {
+ l.memberships[u.ID()] = i
+ }
+ }
+
+ return &l
+}
+
+// localMovingHeuristic performs the Louvain local moving heuristic until
+// no further moves can be made. It returns a boolean indicating that the
+// undirectedMultiplexLocalMover has not made any improvement to the community
+// structure and so the Louvain algorithm is done.
+func (l *undirectedMultiplexLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) {
+ for {
+ l.shuffle(rnd)
+ for _, n := range l.nodes {
+ dQ, dst, src := l.deltaQ(n)
+ if dQ <= 0 {
+ continue
+ }
+ l.move(dst, src)
+ }
+ if !l.moved {
+ return !l.changed
+ }
+ }
+}
+
+// shuffle performs a Fisher-Yates shuffle on the nodes held by the
+// undirectedMultiplexLocalMover using the random source rnd which should return
+// an integer in the range [0,n).
+func (l *undirectedMultiplexLocalMover) shuffle(rnd func(n int) int) {
+ l.moved = false
+ for i := range l.nodes[:len(l.nodes)-1] {
+ j := i + rnd(len(l.nodes)-i)
+ l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i]
+ }
+}
+
+// move moves the node at src to the community at dst.
+func (l *undirectedMultiplexLocalMover) move(dst int, src commIdx) {
+ l.moved = true
+ l.changed = true
+
+ srcComm := l.communities[src.community]
+ n := srcComm[src.node]
+
+ l.memberships[n.ID()] = dst
+
+ l.communities[dst] = append(l.communities[dst], n)
+ srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil
+ l.communities[src.community] = srcComm[:len(srcComm)-1]
+}
+
+// deltaQ returns the highest gain in modularity attainable by moving
+// n from its current community to another connected community and
+// the index of the chosen destination. The index into the
+// undirectedMultiplexLocalMover's communities field is returned in src if n
+// is in communities.
+func (l *undirectedMultiplexLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) {
+ id := n.ID()
+
+ var iterator minTaker
+ if l.searchAll {
+ iterator = &dense{n: len(l.communities)}
+ } else {
+ // Find communities connected to n.
+ var connected intsets.Sparse
+ // The following for loop is equivalent to:
+ //
+ // for i := 0; i < l.g.Depth(); i++ {
+ // for _, v := range l.g.Layer(i).From(n) {
+ // connected.Insert(l.memberships[v.ID()])
+ // }
+ // }
+ //
+ // This is done to avoid an allocation for
+ // each layer.
+ for _, layer := range l.g.layers {
+ for _, vid := range layer.edges[id] {
+ connected.Insert(l.memberships[vid])
+ }
+ }
+ // Insert the node's own community.
+ connected.Insert(l.memberships[id])
+ iterator = &connected
+ }
+
+ // Calculate the highest modularity gain
+ // from moving into another community and
+ // keep the index of that community.
+ var dQremove float64
+ dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1}
+ var i int
+ for iterator.TakeMin(&i) {
+ c := l.communities[i]
+ var removal bool
+ var _dQadd float64
+ for layer := 0; layer < l.g.Depth(); layer++ {
+ m2 := l.m2[layer]
+ if m2 == 0 {
+ // Do not consider layers with zero sum edge weight.
+ continue
+ }
+ w := 1.0
+ if l.weights != nil {
+ w = l.weights[layer]
+ }
+ if w == 0 {
+ // Do not consider layers with zero weighting.
+ continue
+ }
+
+ var k_aC, sigma_totC float64 // C is a substitution for ^𝛼 or ^𝛽.
+ removal = false
+ for j, u := range c {
+ uid := u.ID()
+ if uid == id {
+ // Only mark and check src community on the first layer.
+ if layer == 0 {
+ if src.community != -1 {
+ panic("community: multiple sources")
+ }
+ src = commIdx{i, j}
+ }
+ removal = true
+ }
+
+ k_aC += l.weight[layer](n, u)
+ // sigma_totC could be kept for each community
+ // and updated for moves, changing the calculation
+ // of sigma_totC here from O(n_c) to O(1), but
+ // in practice the time savings do not appear
+ // to be compelling and do not make up for the
+ // increase in code complexity and space required.
+ sigma_totC += l.edgeWeightOf[layer][uid]
+ }
+
+ a_aa := l.weight[layer](n, n)
+ k_a := l.edgeWeightOf[layer][id]
+ gamma := 1.0
+ if l.resolutions != nil {
+ if len(l.resolutions) == 1 {
+ gamma = l.resolutions[0]
+ } else {
+ gamma = l.resolutions[layer]
+ }
+ }
+
+ // See louvain.tex for a derivation of these equations.
+ // The weighting term, w, is described in V Traag,
+ // "Algorithms and dynamical models for communities and
+ // reputation in social networks", chapter 5.
+ // http://www.traag.net/wp/wp-content/papercite-data/pdf/traag_algorithms_2013.pdf
+ switch {
+ case removal:
+ // The community c was the current community,
+ // so calculate the change due to removal.
+ dQremove += w * (k_aC /*^𝛼*/ - a_aa - gamma*k_a*(sigma_totC /*^𝛼*/ -k_a)/m2)
+
+ default:
+ // Otherwise calculate the change due to an addition
+ // to c.
+ _dQadd += w * (k_aC /*^𝛽*/ - gamma*k_a*sigma_totC /*^𝛽*/ /m2)
+ }
+ }
+ if !removal && _dQadd > dQadd {
+ dQadd = _dQadd
+ dst = i
+ }
+ }
+
+ return 2 * (dQadd - dQremove), dst, src
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_undirected_multiplex_test.go b/vendor/github.com/gonum/graph/community/louvain_undirected_multiplex_test.go
new file mode 100644
index 000000000000..2bf28cb54f8f
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_undirected_multiplex_test.go
@@ -0,0 +1,669 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "math"
+ "math/rand"
+ "reflect"
+ "sort"
+ "testing"
+
+ "github.com/gonum/floats"
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+ "github.com/gonum/graph/simple"
+)
+
+var communityUndirectedMultiplexQTests = []struct {
+ name string
+ layers []layer
+ structures []structure
+
+ wantLevels []level
+}{
+ {
+ name: "unconnected",
+ layers: []layer{{g: unconnected, weight: 1}},
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0),
+ 1: linksTo(1),
+ 2: linksTo(2),
+ 3: linksTo(3),
+ 4: linksTo(4),
+ 5: linksTo(5),
+ },
+ want: math.NaN(),
+ },
+ },
+ wantLevels: []level{
+ {
+ q: math.Inf(-1), // Here math.Inf(-1) is used as a placeholder for NaN to allow use of reflect.DeepEqual.
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "small_dumbell",
+ layers: []layer{
+ {g: smallDumbell, edgeWeight: 1, weight: 1},
+ {g: dumbellRepulsion, edgeWeight: -1, weight: -1},
+ },
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2),
+ 1: linksTo(3, 4, 5),
+ },
+ want: 7.0, tol: 1e-10,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5),
+ },
+ want: 0, tol: 1e-14,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 7.0,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2)},
+ {simple.Node(3), simple.Node(4), simple.Node(5)},
+ },
+ },
+ {
+ q: -1.4285714285714284,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "small_dumbell_twice",
+ layers: []layer{
+ {g: smallDumbell, weight: 0.5},
+ {g: smallDumbell, weight: 0.5},
+ },
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2),
+ 1: linksTo(3, 4, 5),
+ },
+ want: 5, tol: 1e-10,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5),
+ },
+ want: 0, tol: 1e-14,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 0.35714285714285715 * 14,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2)},
+ {simple.Node(3), simple.Node(4), simple.Node(5)},
+ },
+ },
+ {
+ q: -0.17346938775510204 * 14,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "repulsion",
+ layers: []layer{{g: repulsion, edgeWeight: -1, weight: -1}},
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2),
+ 1: linksTo(3, 4, 5),
+ },
+ want: 9.0, tol: 1e-10,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0),
+ 1: linksTo(1),
+ 2: linksTo(2),
+ 3: linksTo(3),
+ 4: linksTo(4),
+ 5: linksTo(5),
+ },
+ want: 3, tol: 1e-14,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 9.0,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2)},
+ {simple.Node(3), simple.Node(4), simple.Node(5)},
+ },
+ },
+ {
+ q: 3.0,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "middle_east",
+ layers: []layer{
+ {g: middleEast.friends, edgeWeight: 1, weight: 1},
+ {g: middleEast.enemies, edgeWeight: -1, weight: -1},
+ },
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 6),
+ 1: linksTo(1, 7, 9, 12),
+ 2: linksTo(2, 8, 11),
+ 3: linksTo(3, 4, 5, 10),
+ },
+ want: 33.8180574555, tol: 1e-9,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 2, 3, 4, 5, 10),
+ 1: linksTo(1, 7, 9, 12),
+ 2: linksTo(6),
+ 3: linksTo(8, 11),
+ },
+ want: 30.92749658, tol: 1e-7,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12),
+ },
+ want: 0, tol: 1e-14,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 33.818057455540355,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(6)},
+ {simple.Node(1), simple.Node(7), simple.Node(9), simple.Node(12)},
+ {simple.Node(2), simple.Node(8), simple.Node(11)},
+ {simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(10)},
+ },
+ },
+ {
+ q: 3.8071135430916545,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ {simple.Node(6)},
+ {simple.Node(7)},
+ {simple.Node(8)},
+ {simple.Node(9)},
+ {simple.Node(10)},
+ {simple.Node(11)},
+ {simple.Node(12)},
+ },
+ },
+ },
+ },
+}
+
+func TestCommunityQUndirectedMultiplex(t *testing.T) {
+ for _, test := range communityUndirectedMultiplexQTests {
+ g, weights, err := undirectedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ for _, structure := range test.structures {
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ }
+ q := QMultiplex(g, communities, weights, []float64{structure.resolution})
+ got := floats.Sum(q)
+ if !floats.EqualWithinAbsOrRel(got, structure.want, structure.tol, structure.tol) && !math.IsNaN(structure.want) {
+ for _, c := range communities {
+ sort.Sort(ordered.ByID(c))
+ }
+ t.Errorf("unexpected Q value for %q %v: got: %v %.3v want: %v",
+ test.name, communities, got, q, structure.want)
+ }
+ }
+ }
+}
+
+func TestCommunityDeltaQUndirectedMultiplex(t *testing.T) {
+tests:
+ for _, test := range communityUndirectedMultiplexQTests {
+ g, weights, err := undirectedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ rnd := rand.New(rand.NewSource(1)).Intn
+ for _, structure := range test.structures {
+ communityOf := make(map[int]int)
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communityOf[n] = i
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+ resolution := []float64{structure.resolution}
+
+ before := QMultiplex(g, communities, weights, resolution)
+
+ // We test exhaustively.
+ const all = true
+
+ l := newUndirectedMultiplexLocalMover(
+ reduceUndirectedMultiplex(g, nil, weights),
+ communities, weights, resolution, all)
+ if l == nil {
+ if !math.IsNaN(floats.Sum(before)) {
+ t.Errorf("unexpected nil localMover with non-NaN Q graph: Q=%.4v", before)
+ }
+ continue tests
+ }
+
+ // This is done to avoid run-to-run
+ // variation due to map iteration order.
+ sort.Sort(ordered.ByID(l.nodes))
+
+ l.shuffle(rnd)
+
+ for _, target := range l.nodes {
+ got, gotDst, gotSrc := l.deltaQ(target)
+
+ want, wantDst := math.Inf(-1), -1
+ migrated := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ if n == target.ID() {
+ continue
+ }
+ migrated[i] = append(migrated[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(migrated[i]))
+ }
+
+ for i, c := range structure.memberships {
+ if i == communityOf[target.ID()] {
+ continue
+ }
+ if !(all && hasNegative(weights)) {
+ connected := false
+ search:
+ for l := 0; l < g.Depth(); l++ {
+ if weights[l] < 0 {
+ connected = true
+ break search
+ }
+ layer := g.Layer(l)
+ for n := range c {
+ if layer.HasEdgeBetween(simple.Node(n), target) {
+ connected = true
+ break search
+ }
+ }
+ }
+ if !connected {
+ continue
+ }
+ }
+ migrated[i] = append(migrated[i], target)
+ after := QMultiplex(g, migrated, weights, resolution)
+ migrated[i] = migrated[i][:len(migrated[i])-1]
+ if delta := floats.Sum(after) - floats.Sum(before); delta > want {
+ want = delta
+ wantDst = i
+ }
+ }
+
+ if !floats.EqualWithinAbsOrRel(got, want, structure.tol, structure.tol) || gotDst != wantDst {
+ t.Errorf("unexpected result moving n=%d in c=%d of %s/%.4v: got: %.4v,%d want: %.4v,%d"+
+ "\n\t%v\n\t%v",
+ target.ID(), communityOf[target.ID()], test.name, structure.resolution, got, gotDst, want, wantDst,
+ communities, migrated)
+ }
+ if gotSrc.community != communityOf[target.ID()] {
+ t.Errorf("unexpected source community index: got: %d want: %d", gotSrc.community, communityOf[target.ID()])
+ } else if communities[gotSrc.community][gotSrc.node].ID() != target.ID() {
+ wantNodeIdx := -1
+ for i, n := range communities[gotSrc.community] {
+ if n.ID() == target.ID() {
+ wantNodeIdx = i
+ break
+ }
+ }
+ t.Errorf("unexpected source node index: got: %d want: %d", gotSrc.node, wantNodeIdx)
+ }
+ }
+ }
+ }
+}
+
+func TestReduceQConsistencyUndirectedMultiplex(t *testing.T) {
+tests:
+ for _, test := range communityUndirectedMultiplexQTests {
+ g, weights, err := undirectedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ for _, structure := range test.structures {
+ if math.IsNaN(structure.want) {
+ continue tests
+ }
+
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ gQ := QMultiplex(g, communities, weights, []float64{structure.resolution})
+ gQnull := QMultiplex(g, nil, weights, nil)
+
+ cg0 := reduceUndirectedMultiplex(g, nil, weights)
+ cg0Qnull := QMultiplex(cg0, cg0.Structure(), weights, nil)
+ if !floats.EqualWithinAbsOrRel(floats.Sum(gQnull), floats.Sum(cg0Qnull), structure.tol, structure.tol) {
+ t.Errorf("disagreement between null Q from method: %v and function: %v", cg0Qnull, gQnull)
+ }
+ cg0Q := QMultiplex(cg0, communities, weights, []float64{structure.resolution})
+ if !floats.EqualWithinAbsOrRel(floats.Sum(gQ), floats.Sum(cg0Q), structure.tol, structure.tol) {
+ t.Errorf("unexpected Q result after initial reduction: got: %v want :%v", cg0Q, gQ)
+ }
+
+ cg1 := reduceUndirectedMultiplex(cg0, communities, weights)
+ cg1Q := QMultiplex(cg1, cg1.Structure(), weights, []float64{structure.resolution})
+ if !floats.EqualWithinAbsOrRel(floats.Sum(gQ), floats.Sum(cg1Q), structure.tol, structure.tol) {
+ t.Errorf("unexpected Q result after second reduction: got: %v want :%v", cg1Q, gQ)
+ }
+ }
+ }
+}
+
+var localUndirectedMultiplexMoveTests = []struct {
+ name string
+ layers []layer
+ structures []moveStructures
+}{
+ {
+ name: "blondel",
+ layers: []layer{{g: blondel, weight: 1}, {g: blondel, weight: 0.5}},
+ structures: []moveStructures{
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ targetNodes: []graph.Node{simple.Node(0)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ targetNodes: []graph.Node{simple.Node(3)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ // Case to demonstrate when A_aa != k_a^𝛼.
+ targetNodes: []graph.Node{simple.Node(3), simple.Node(2)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ },
+ },
+}
+
+func TestMoveLocalUndirectedMultiplex(t *testing.T) {
+ for _, test := range localUndirectedMultiplexMoveTests {
+ g, weights, err := undirectedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ for _, structure := range test.structures {
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ r := reduceUndirectedMultiplex(reduceUndirectedMultiplex(g, nil, weights), communities, weights)
+
+ l := newUndirectedMultiplexLocalMover(r, r.communities, weights, []float64{structure.resolution}, true)
+ for _, n := range structure.targetNodes {
+ dQ, dst, src := l.deltaQ(n)
+ if dQ > 0 {
+ before := floats.Sum(QMultiplex(r, l.communities, weights, []float64{structure.resolution}))
+ l.move(dst, src)
+ after := floats.Sum(QMultiplex(r, l.communities, weights, []float64{structure.resolution}))
+ want := after - before
+ if !floats.EqualWithinAbsOrRel(dQ, want, structure.tol, structure.tol) {
+ t.Errorf("unexpected deltaQ: got: %v want: %v", dQ, want)
+ }
+ }
+ }
+ }
+ }
+}
+
+func TestLouvainMultiplex(t *testing.T) {
+ const louvainIterations = 20
+
+ for _, test := range communityUndirectedMultiplexQTests {
+ g, weights, err := undirectedMultiplexFrom(test.layers)
+ if err != nil {
+ t.Errorf("unexpected error creating multiplex: %v", err)
+ continue
+ }
+
+ if test.structures[0].resolution != 1 {
+ panic("bad test: expect resolution=1")
+ }
+ want := make([][]graph.Node, len(test.structures[0].memberships))
+ for i, c := range test.structures[0].memberships {
+ for n := range c {
+ want[i] = append(want[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(want[i]))
+ }
+ sort.Sort(ordered.BySliceIDs(want))
+
+ var (
+ got *ReducedUndirectedMultiplex
+ bestQ = math.Inf(-1)
+ )
+ // Modularize is randomised so we do this to
+ // ensure the level tests are consistent.
+ src := rand.New(rand.NewSource(1))
+ for i := 0; i < louvainIterations; i++ {
+ r := ModularizeMultiplex(g, weights, nil, true, src).(*ReducedUndirectedMultiplex)
+ if q := floats.Sum(QMultiplex(r, nil, weights, nil)); q > bestQ || math.IsNaN(q) {
+ bestQ = q
+ got = r
+
+ if math.IsNaN(q) {
+ // Don't try again for non-connected case.
+ break
+ }
+ }
+
+ var qs []float64
+ for p := r; p != nil; p = p.Expanded().(*ReducedUndirectedMultiplex) {
+ qs = append(qs, floats.Sum(QMultiplex(p, nil, weights, nil)))
+ }
+
+ // Recovery of Q values is reversed.
+ if reverse(qs); !sort.Float64sAreSorted(qs) {
+ t.Errorf("Q values not monotonically increasing: %.5v", qs)
+ }
+ }
+
+ gotCommunities := got.Communities()
+ for _, c := range gotCommunities {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(gotCommunities))
+ if !reflect.DeepEqual(gotCommunities, want) {
+ t.Errorf("unexpected community membership for %s Q=%.4v:\n\tgot: %v\n\twant:%v",
+ test.name, bestQ, gotCommunities, want)
+ continue
+ }
+
+ var levels []level
+ for p := got; p != nil; p = p.Expanded().(*ReducedUndirectedMultiplex) {
+ var communities [][]graph.Node
+ if p.parent != nil {
+ communities = p.parent.Communities()
+ for _, c := range communities {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(communities))
+ } else {
+ communities = reduceUndirectedMultiplex(g, nil, weights).Communities()
+ }
+ q := floats.Sum(QMultiplex(p, nil, weights, nil))
+ if math.IsNaN(q) {
+ // Use an equalable flag value in place of NaN.
+ q = math.Inf(-1)
+ }
+ levels = append(levels, level{q: q, communities: communities})
+ }
+ if !reflect.DeepEqual(levels, test.wantLevels) {
+ t.Errorf("unexpected level structure:\n\tgot: %v\n\twant:%v", levels, test.wantLevels)
+ }
+ }
+}
+
+func TestNonContiguousUndirectedMultiplex(t *testing.T) {
+ g := simple.NewUndirectedGraph(0, 0)
+ for _, e := range []simple.Edge{
+ {F: simple.Node(0), T: simple.Node(1), W: 1},
+ {F: simple.Node(4), T: simple.Node(5), W: 1},
+ } {
+ g.SetEdge(e)
+ }
+
+ func() {
+ defer func() {
+ r := recover()
+ if r != nil {
+ t.Error("unexpected panic with non-contiguous ID range")
+ }
+ }()
+ ModularizeMultiplex(UndirectedLayers{g}, nil, nil, true, nil)
+ }()
+}
+
+func BenchmarkLouvainMultiplex(b *testing.B) {
+ src := rand.New(rand.NewSource(1))
+ for i := 0; i < b.N; i++ {
+ ModularizeMultiplex(UndirectedLayers{dupGraph}, nil, nil, true, src)
+ }
+}
+
+func undirectedMultiplexFrom(raw []layer) (UndirectedLayers, []float64, error) {
+ var layers []graph.Undirected
+ var weights []float64
+ for _, l := range raw {
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range l.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ w := 1.0
+ if l.edgeWeight != 0 {
+ w = l.edgeWeight
+ }
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: w})
+ }
+ }
+ layers = append(layers, g)
+ weights = append(weights, l.weight)
+ }
+ g, err := NewUndirectedLayers(layers...)
+ if err != nil {
+ return nil, nil, err
+ }
+ return g, weights, nil
+}
diff --git a/vendor/github.com/gonum/graph/community/louvain_undirected_test.go b/vendor/github.com/gonum/graph/community/louvain_undirected_test.go
new file mode 100644
index 000000000000..b567e839f472
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/louvain_undirected_test.go
@@ -0,0 +1,648 @@
+// Copyright ©2015 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package community
+
+import (
+ "math"
+ "math/rand"
+ "reflect"
+ "sort"
+ "testing"
+
+ "github.com/gonum/floats"
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+ "github.com/gonum/graph/simple"
+)
+
+var communityUndirectedQTests = []struct {
+ name string
+ g []set
+ structures []structure
+
+ wantLevels []level
+}{
+ // The java reference implementation is available from http://www.ludowaltman.nl/slm/.
+ {
+ name: "unconnected",
+ g: unconnected,
+ structures: []structure{
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0),
+ 1: linksTo(1),
+ 2: linksTo(2),
+ 3: linksTo(3),
+ 4: linksTo(4),
+ 5: linksTo(5),
+ },
+ want: math.NaN(),
+ },
+ },
+ wantLevels: []level{
+ {
+ q: math.Inf(-1), // Here math.Inf(-1) is used as a placeholder for NaN to allow use of reflect.DeepEqual.
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "small_dumbell",
+ g: smallDumbell,
+ structures: []structure{
+ {
+ resolution: 1,
+ // community structure and modularity calculated by java reference implementation.
+ memberships: []set{
+ 0: linksTo(0, 1, 2),
+ 1: linksTo(3, 4, 5),
+ },
+ want: 0.357, tol: 1e-3,
+ },
+ {
+ resolution: 1,
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5),
+ },
+ // theoretical expectation.
+ want: 0, tol: 1e-14,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 0.35714285714285715,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2)},
+ {simple.Node(3), simple.Node(4), simple.Node(5)},
+ },
+ },
+ {
+ q: -0.17346938775510204,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ },
+ },
+ },
+ },
+ {
+ name: "zachary",
+ g: zachary,
+ structures: []structure{
+ {
+ resolution: 1,
+ // community structure and modularity from doi: 10.1140/epjb/e2013-40829-0
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 7, 11, 12, 13, 17, 19, 21),
+ 1: linksTo(4, 5, 6, 10, 16),
+ 2: linksTo(8, 9, 14, 15, 18, 20, 22, 26, 29, 30, 32, 33),
+ 3: linksTo(23, 24, 25, 27, 28, 31),
+ },
+ // Noted to be the optimal modularisation in the paper above.
+ want: 0.4198, tol: 1e-4,
+ },
+ {
+ resolution: 0.5,
+ // community structure and modularity calculated by java reference implementation.
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 16, 17, 19, 21),
+ 1: linksTo(8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33),
+ },
+ want: 0.6218, tol: 1e-3,
+ },
+ {
+ resolution: 2,
+ // community structure and modularity calculated by java reference implementation.
+ memberships: []set{
+ 0: linksTo(14, 18, 20, 22, 32, 33, 15),
+ 1: linksTo(0, 1, 11, 17, 19, 21),
+ 2: linksTo(2, 3, 7, 9, 12, 13),
+ 3: linksTo(4, 5, 6, 10, 16),
+ 4: linksTo(24, 25, 28, 31),
+ 5: linksTo(23, 26, 27, 29),
+ 6: linksTo(8, 30),
+ },
+ want: 0.1645, tol: 1e-3,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 0.4197896120973044,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(7), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(17), simple.Node(19), simple.Node(21)},
+ {simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(10), simple.Node(16)},
+ {simple.Node(8), simple.Node(9), simple.Node(14), simple.Node(15), simple.Node(18), simple.Node(20), simple.Node(22), simple.Node(26), simple.Node(29), simple.Node(30), simple.Node(32), simple.Node(33)},
+ {simple.Node(23), simple.Node(24), simple.Node(25), simple.Node(27), simple.Node(28), simple.Node(31)},
+ },
+ },
+ {
+ q: 0.39907955292570674,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(7), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(17), simple.Node(19), simple.Node(21)},
+ {simple.Node(4), simple.Node(10)},
+ {simple.Node(5), simple.Node(6), simple.Node(16)},
+ {simple.Node(8), simple.Node(9), simple.Node(14), simple.Node(15), simple.Node(18), simple.Node(20), simple.Node(22), simple.Node(26), simple.Node(29), simple.Node(30), simple.Node(32), simple.Node(33)},
+ {simple.Node(23), simple.Node(24), simple.Node(25), simple.Node(27), simple.Node(28), simple.Node(31)},
+ },
+ },
+ {
+ q: -0.04980276134122286,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ {simple.Node(6)},
+ {simple.Node(7)},
+ {simple.Node(8)},
+ {simple.Node(9)},
+ {simple.Node(10)},
+ {simple.Node(11)},
+ {simple.Node(12)},
+ {simple.Node(13)},
+ {simple.Node(14)},
+ {simple.Node(15)},
+ {simple.Node(16)},
+ {simple.Node(17)},
+ {simple.Node(18)},
+ {simple.Node(19)},
+ {simple.Node(20)},
+ {simple.Node(21)},
+ {simple.Node(22)},
+ {simple.Node(23)},
+ {simple.Node(24)},
+ {simple.Node(25)},
+ {simple.Node(26)},
+ {simple.Node(27)},
+ {simple.Node(28)},
+ {simple.Node(29)},
+ {simple.Node(30)},
+ {simple.Node(31)},
+ {simple.Node(32)},
+ {simple.Node(33)},
+ },
+ },
+ },
+ },
+ {
+ name: "blondel",
+ g: blondel,
+ structures: []structure{
+ {
+ resolution: 1,
+ // community structure and modularity calculated by java reference implementation.
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 3, 4, 5, 6, 7),
+ 1: linksTo(8, 9, 10, 11, 12, 13, 14, 15),
+ },
+ want: 0.3922, tol: 1e-4,
+ },
+ },
+ wantLevels: []level{
+ {
+ q: 0.39221938775510207,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(3), simple.Node(4), simple.Node(5), simple.Node(6), simple.Node(7)},
+ {simple.Node(8), simple.Node(9), simple.Node(10), simple.Node(11), simple.Node(12), simple.Node(13), simple.Node(14), simple.Node(15)},
+ },
+ },
+ {
+ q: 0.34630102040816324,
+ communities: [][]graph.Node{
+ {simple.Node(0), simple.Node(1), simple.Node(2), simple.Node(4), simple.Node(5)},
+ {simple.Node(3), simple.Node(6), simple.Node(7)},
+ {simple.Node(8), simple.Node(9), simple.Node(10), simple.Node(12), simple.Node(14), simple.Node(15)},
+ {simple.Node(11), simple.Node(13)},
+ },
+ },
+ {
+ q: -0.07142857142857144,
+ communities: [][]graph.Node{
+ {simple.Node(0)},
+ {simple.Node(1)},
+ {simple.Node(2)},
+ {simple.Node(3)},
+ {simple.Node(4)},
+ {simple.Node(5)},
+ {simple.Node(6)},
+ {simple.Node(7)},
+ {simple.Node(8)},
+ {simple.Node(9)},
+ {simple.Node(10)},
+ {simple.Node(11)},
+ {simple.Node(12)},
+ {simple.Node(13)},
+ {simple.Node(14)},
+ {simple.Node(15)},
+ },
+ },
+ },
+ },
+}
+
+func TestCommunityQUndirected(t *testing.T) {
+ for _, test := range communityUndirectedQTests {
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+ for _, structure := range test.structures {
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ }
+ got := Q(g, communities, structure.resolution)
+ if !floats.EqualWithinAbsOrRel(got, structure.want, structure.tol, structure.tol) && !math.IsNaN(structure.want) {
+ for _, c := range communities {
+ sort.Sort(ordered.ByID(c))
+ }
+ t.Errorf("unexpected Q value for %q %v: got: %v want: %v",
+ test.name, communities, got, structure.want)
+ }
+ }
+ }
+}
+
+func TestCommunityDeltaQUndirected(t *testing.T) {
+tests:
+ for _, test := range communityUndirectedQTests {
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ rnd := rand.New(rand.NewSource(1)).Intn
+ for _, structure := range test.structures {
+ communityOf := make(map[int]int)
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communityOf[n] = i
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ before := Q(g, communities, structure.resolution)
+
+ l := newUndirectedLocalMover(reduceUndirected(g, nil), communities, structure.resolution)
+ if l == nil {
+ if !math.IsNaN(before) {
+ t.Errorf("unexpected nil localMover with non-NaN Q graph: Q=%.4v", before)
+ }
+ continue tests
+ }
+
+ // This is done to avoid run-to-run
+ // variation due to map iteration order.
+ sort.Sort(ordered.ByID(l.nodes))
+
+ l.shuffle(rnd)
+
+ for _, target := range l.nodes {
+ got, gotDst, gotSrc := l.deltaQ(target)
+
+ want, wantDst := math.Inf(-1), -1
+ migrated := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ if n == target.ID() {
+ continue
+ }
+ migrated[i] = append(migrated[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(migrated[i]))
+ }
+
+ for i, c := range structure.memberships {
+ if i == communityOf[target.ID()] {
+ continue
+ }
+ connected := false
+ for n := range c {
+ if g.HasEdgeBetween(simple.Node(n), target) {
+ connected = true
+ break
+ }
+ }
+ if !connected {
+ continue
+ }
+ migrated[i] = append(migrated[i], target)
+ after := Q(g, migrated, structure.resolution)
+ migrated[i] = migrated[i][:len(migrated[i])-1]
+ if after-before > want {
+ want = after - before
+ wantDst = i
+ }
+ }
+
+ if !floats.EqualWithinAbsOrRel(got, want, structure.tol, structure.tol) || gotDst != wantDst {
+ t.Errorf("unexpected result moving n=%d in c=%d of %s/%.4v: got: %.4v,%d want: %.4v,%d"+
+ "\n\t%v\n\t%v",
+ target.ID(), communityOf[target.ID()], test.name, structure.resolution, got, gotDst, want, wantDst,
+ communities, migrated)
+ }
+ if gotSrc.community != communityOf[target.ID()] {
+ t.Errorf("unexpected source community index: got: %d want: %d", gotSrc.community, communityOf[target.ID()])
+ } else if communities[gotSrc.community][gotSrc.node].ID() != target.ID() {
+ wantNodeIdx := -1
+ for i, n := range communities[gotSrc.community] {
+ if n.ID() == target.ID() {
+ wantNodeIdx = i
+ break
+ }
+ }
+ t.Errorf("unexpected source node index: got: %d want: %d", gotSrc.node, wantNodeIdx)
+ }
+ }
+ }
+ }
+}
+
+func TestReduceQConsistencyUndirected(t *testing.T) {
+tests:
+ for _, test := range communityUndirectedQTests {
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ for _, structure := range test.structures {
+ if math.IsNaN(structure.want) {
+ continue tests
+ }
+
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ gQ := Q(g, communities, structure.resolution)
+ gQnull := Q(g, nil, 1)
+
+ cg0 := reduceUndirected(g, nil)
+ cg0Qnull := Q(cg0, cg0.Structure(), 1)
+ if !floats.EqualWithinAbsOrRel(gQnull, cg0Qnull, structure.tol, structure.tol) {
+ t.Errorf("disagreement between null Q from method: %v and function: %v", cg0Qnull, gQnull)
+ }
+ cg0Q := Q(cg0, communities, structure.resolution)
+ if !floats.EqualWithinAbsOrRel(gQ, cg0Q, structure.tol, structure.tol) {
+ t.Errorf("unexpected Q result after initial reduction: got: %v want :%v", cg0Q, gQ)
+ }
+
+ cg1 := reduceUndirected(cg0, communities)
+ cg1Q := Q(cg1, cg1.Structure(), structure.resolution)
+ if !floats.EqualWithinAbsOrRel(gQ, cg1Q, structure.tol, structure.tol) {
+ t.Errorf("unexpected Q result after second reduction: got: %v want :%v", cg1Q, gQ)
+ }
+ }
+ }
+}
+
+var localUndirectedMoveTests = []struct {
+ name string
+ g []set
+ structures []moveStructures
+}{
+ {
+ name: "blondel",
+ g: blondel,
+ structures: []moveStructures{
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ targetNodes: []graph.Node{simple.Node(0)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ targetNodes: []graph.Node{simple.Node(3)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ {
+ memberships: []set{
+ 0: linksTo(0, 1, 2, 4, 5),
+ 1: linksTo(3, 6, 7),
+ 2: linksTo(8, 9, 10, 12, 14, 15),
+ 3: linksTo(11, 13),
+ },
+ // Case to demonstrate when A_aa != k_a^𝛼.
+ targetNodes: []graph.Node{simple.Node(3), simple.Node(2)},
+ resolution: 1,
+ tol: 1e-14,
+ },
+ },
+ },
+}
+
+func TestMoveLocalUndirected(t *testing.T) {
+ for _, test := range localUndirectedMoveTests {
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ for _, structure := range test.structures {
+ communities := make([][]graph.Node, len(structure.memberships))
+ for i, c := range structure.memberships {
+ for n := range c {
+ communities[i] = append(communities[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(communities[i]))
+ }
+
+ r := reduceUndirected(reduceUndirected(g, nil), communities)
+
+ l := newUndirectedLocalMover(r, r.communities, structure.resolution)
+ for _, n := range structure.targetNodes {
+ dQ, dst, src := l.deltaQ(n)
+ if dQ > 0 {
+ before := Q(r, l.communities, structure.resolution)
+ l.move(dst, src)
+ after := Q(r, l.communities, structure.resolution)
+ want := after - before
+ if !floats.EqualWithinAbsOrRel(dQ, want, structure.tol, structure.tol) {
+ t.Errorf("unexpected deltaQ: got: %v want: %v", dQ, want)
+ }
+ }
+ }
+ }
+ }
+}
+
+func TestModularizeUndirected(t *testing.T) {
+ const louvainIterations = 20
+
+ for _, test := range communityUndirectedQTests {
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range test.g {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ if test.structures[0].resolution != 1 {
+ panic("bad test: expect resolution=1")
+ }
+ want := make([][]graph.Node, len(test.structures[0].memberships))
+ for i, c := range test.structures[0].memberships {
+ for n := range c {
+ want[i] = append(want[i], simple.Node(n))
+ }
+ sort.Sort(ordered.ByID(want[i]))
+ }
+ sort.Sort(ordered.BySliceIDs(want))
+
+ var (
+ got *ReducedUndirected
+ bestQ = math.Inf(-1)
+ )
+ // Modularize is randomised so we do this to
+ // ensure the level tests are consistent.
+ src := rand.New(rand.NewSource(1))
+ for i := 0; i < louvainIterations; i++ {
+ r := Modularize(g, 1, src).(*ReducedUndirected)
+ if q := Q(r, nil, 1); q > bestQ || math.IsNaN(q) {
+ bestQ = q
+ got = r
+
+ if math.IsNaN(q) {
+ // Don't try again for non-connected case.
+ break
+ }
+ }
+
+ var qs []float64
+ for p := r; p != nil; p = p.Expanded().(*ReducedUndirected) {
+ qs = append(qs, Q(p, nil, 1))
+ }
+
+ // Recovery of Q values is reversed.
+ if reverse(qs); !sort.Float64sAreSorted(qs) {
+ t.Errorf("Q values not monotonically increasing: %.5v", qs)
+ }
+ }
+
+ gotCommunities := got.Communities()
+ for _, c := range gotCommunities {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(gotCommunities))
+ if !reflect.DeepEqual(gotCommunities, want) {
+ t.Errorf("unexpected community membership for %s Q=%.4v:\n\tgot: %v\n\twant:%v",
+ test.name, bestQ, gotCommunities, want)
+ continue
+ }
+
+ var levels []level
+ for p := got; p != nil; p = p.Expanded().(*ReducedUndirected) {
+ var communities [][]graph.Node
+ if p.parent != nil {
+ communities = p.parent.Communities()
+ for _, c := range communities {
+ sort.Sort(ordered.ByID(c))
+ }
+ sort.Sort(ordered.BySliceIDs(communities))
+ } else {
+ communities = reduceUndirected(g, nil).Communities()
+ }
+ q := Q(p, nil, 1)
+ if math.IsNaN(q) {
+ // Use an equalable flag value in place of NaN.
+ q = math.Inf(-1)
+ }
+ levels = append(levels, level{q: q, communities: communities})
+ }
+ if !reflect.DeepEqual(levels, test.wantLevels) {
+ t.Errorf("unexpected level structure:\n\tgot: %v\n\twant:%v", levels, test.wantLevels)
+ }
+ }
+}
+
+func TestNonContiguousUndirected(t *testing.T) {
+ g := simple.NewUndirectedGraph(0, 0)
+ for _, e := range []simple.Edge{
+ {F: simple.Node(0), T: simple.Node(1), W: 1},
+ {F: simple.Node(4), T: simple.Node(5), W: 1},
+ } {
+ g.SetEdge(e)
+ }
+
+ func() {
+ defer func() {
+ r := recover()
+ if r != nil {
+ t.Error("unexpected panic with non-contiguous ID range")
+ }
+ }()
+ Modularize(g, 1, nil)
+ }()
+}
+
+func BenchmarkLouvain(b *testing.B) {
+ src := rand.New(rand.NewSource(1))
+ for i := 0; i < b.N; i++ {
+ Modularize(dupGraph, 1, src)
+ }
+}
diff --git a/vendor/github.com/gonum/graph/community/printgraphs.go b/vendor/github.com/gonum/graph/community/printgraphs.go
new file mode 100644
index 000000000000..c434a33bc738
--- /dev/null
+++ b/vendor/github.com/gonum/graph/community/printgraphs.go
@@ -0,0 +1,142 @@
+// Copyright ©2016 The gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+// printgraphs allows us to generate a consistent directed view of
+// a set of edges that follows a reasonably real-world-meaningful
+// graph. The interpretation of the links in the resulting directed
+// graphs are either "suggests" in the context of a Page Ranking or
+// possibly "looks up to" in the Zachary graph.
+package main
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/gonum/graph"
+ "github.com/gonum/graph/internal/ordered"
+ "github.com/gonum/graph/network"
+ "github.com/gonum/graph/simple"
+)
+
+// set is an integer set.
+type set map[int]struct{}
+
+func linksTo(i ...int) set {
+ if len(i) == 0 {
+ return nil
+ }
+ s := make(set)
+ for _, v := range i {
+ s[v] = struct{}{}
+ }
+ return s
+}
+
+var (
+ zachary = []set{
+ 0: linksTo(1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 17, 19, 21, 31),
+ 1: linksTo(2, 3, 7, 13, 17, 19, 21, 30),
+ 2: linksTo(3, 7, 8, 9, 13, 27, 28, 32),
+ 3: linksTo(7, 12, 13),
+ 4: linksTo(6, 10),
+ 5: linksTo(6, 10, 16),
+ 6: linksTo(16),
+ 8: linksTo(30, 32, 33),
+ 9: linksTo(33),
+ 13: linksTo(33),
+ 14: linksTo(32, 33),
+ 15: linksTo(32, 33),
+ 18: linksTo(32, 33),
+ 19: linksTo(33),
+ 20: linksTo(32, 33),
+ 22: linksTo(32, 33),
+ 23: linksTo(25, 27, 29, 32, 33),
+ 24: linksTo(25, 27, 31),
+ 25: linksTo(31),
+ 26: linksTo(29, 33),
+ 27: linksTo(33),
+ 28: linksTo(31, 33),
+ 29: linksTo(32, 33),
+ 30: linksTo(32, 33),
+ 31: linksTo(32, 33),
+ 32: linksTo(33),
+ 33: nil,
+ }
+
+ blondel = []set{
+ 0: linksTo(2, 3, 4, 5),
+ 1: linksTo(2, 4, 7),
+ 2: linksTo(4, 5, 6),
+ 3: linksTo(7),
+ 4: linksTo(10),
+ 5: linksTo(7, 11),
+ 6: linksTo(7, 11),
+ 8: linksTo(9, 10, 11, 14, 15),
+ 9: linksTo(12, 14),
+ 10: linksTo(11, 12, 13, 14),
+ 11: linksTo(13),
+ 15: nil,
+ }
+)
+
+func main() {
+ for _, raw := range []struct {
+ name string
+ set []set
+ }{
+ {"zachary", zachary},
+ {"blondel", blondel},
+ } {
+ g := simple.NewUndirectedGraph(0, 0)
+ for u, e := range raw.set {
+ // Add nodes that are not defined by an edge.
+ if !g.Has(simple.Node(u)) {
+ g.AddNode(simple.Node(u))
+ }
+ for v := range e {
+ g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v), W: 1})
+ }
+ }
+
+ nodes := g.Nodes()
+ sort.Sort(ordered.ByID(nodes))
+
+ fmt.Printf("%s = []set{\n", raw.name)
+ rank := network.PageRank(asDirected{g}, 0.85, 1e-8)
+ for _, u := range nodes {
+ to := g.From(nodes[u.ID()])
+ sort.Sort(ordered.ByID(to))
+ var links []int
+ for _, v := range to {
+ if rank[u.ID()] <= rank[v.ID()] {
+ links = append(links, v.ID())
+ }
+ }
+
+ if links == nil {
+ fmt.Printf("\t%d: nil, // rank=%.4v\n", u.ID(), rank[u.ID()])
+ continue
+ }
+
+ fmt.Printf("\t%d: linksTo(", u.ID())
+ for i, v := range links {
+ if i != 0 {
+ fmt.Print(", ")
+ }
+ fmt.Print(v)
+ }
+ fmt.Printf("), // rank=%.4v\n", rank[u.ID()])
+ }
+ fmt.Println("}")
+ }
+}
+
+type asDirected struct{ *simple.UndirectedGraph }
+
+func (g asDirected) HasEdgeFromTo(u, v graph.Node) bool {
+ return g.UndirectedGraph.HasEdgeBetween(u, v)
+}
+func (g asDirected) To(v graph.Node) []graph.Node { return g.From(v) }
diff --git a/vendor/github.com/openshift/api/build/v1/generated.pb.go b/vendor/github.com/openshift/api/build/v1/generated.pb.go
index 2e83d48c165b..8cc4518e3305 100644
--- a/vendor/github.com/openshift/api/build/v1/generated.pb.go
+++ b/vendor/github.com/openshift/api/build/v1/generated.pb.go
@@ -2181,6 +2181,14 @@ func (m *ImageChangeTrigger) MarshalTo(dAtA []byte) (int, error) {
}
i += n69
}
+ dAtA[i] = 0x18
+ i++
+ if m.Paused {
+ dAtA[i] = 1
+ } else {
+ dAtA[i] = 0
+ }
+ i++
return i, nil
}
@@ -3469,6 +3477,7 @@ func (m *ImageChangeTrigger) Size() (n int) {
l = m.From.Size()
n += 1 + l + sovGenerated(uint64(l))
}
+ n += 2
return n
}
@@ -4176,6 +4185,7 @@ func (this *ImageChangeTrigger) String() string {
s := strings.Join([]string{`&ImageChangeTrigger{`,
`LastTriggeredImageID:` + fmt.Sprintf("%v", this.LastTriggeredImageID) + `,`,
`From:` + strings.Replace(fmt.Sprintf("%v", this.From), "ObjectReference", "k8s_io_api_core_v1.ObjectReference", 1) + `,`,
+ `Paused:` + fmt.Sprintf("%v", this.Paused) + `,`,
`}`,
}, "")
return s
@@ -10476,6 +10486,26 @@ func (m *ImageChangeTrigger) Unmarshal(dAtA []byte) error {
return err
}
iNdEx = postIndex
+ case 3:
+ if wireType != 0 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Paused", wireType)
+ }
+ var v int
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowGenerated
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := dAtA[iNdEx]
+ iNdEx++
+ v |= (int(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ m.Paused = bool(v != 0)
default:
iNdEx = preIndex
skippy, err := skipGenerated(dAtA[iNdEx:])
@@ -12662,248 +12692,249 @@ func init() {
}
var fileDescriptorGenerated = []byte{
- // 3879 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x5b, 0xcd, 0x6f, 0x1b, 0x49,
- 0x76, 0x77, 0x93, 0x14, 0x45, 0x3e, 0xca, 0xfa, 0x28, 0xc9, 0x63, 0x5a, 0xeb, 0x15, 0x3d, 0x3d,
- 0x98, 0x85, 0x27, 0xe3, 0xa1, 0x56, 0x1a, 0xdb, 0xf1, 0xee, 0x22, 0x1b, 0x88, 0x92, 0x6c, 0xcb,
- 0x2b, 0xdb, 0x42, 0x51, 0x6b, 0x3b, 0x3b, 0xc6, 0x26, 0xad, 0x56, 0x91, 0xea, 0x51, 0xb3, 0x9b,
- 0xee, 0x6a, 0x6a, 0x46, 0x0b, 0x04, 0x18, 0x04, 0x58, 0x0c, 0x76, 0xf7, 0x92, 0xbd, 0x2c, 0x92,
- 0x5c, 0x92, 0x20, 0xc8, 0x21, 0xc8, 0x29, 0x87, 0x00, 0x0b, 0xec, 0x25, 0x40, 0xf6, 0xe0, 0x43,
- 0x0e, 0x03, 0x24, 0x87, 0x01, 0x32, 0x20, 0x32, 0xcc, 0x21, 0xff, 0x83, 0x73, 0x09, 0xea, 0xa3,
- 0xbb, 0xab, 0x9b, 0x4d, 0xb9, 0x29, 0x79, 0x9c, 0x49, 0x6e, 0xe2, 0xfb, 0xf8, 0xbd, 0xaa, 0xea,
- 0x57, 0xf5, 0x3e, 0xaa, 0x04, 0x2b, 0x6d, 0xcb, 0x3f, 0xe8, 0xed, 0xd5, 0x4d, 0xb7, 0xb3, 0xec,
- 0x76, 0x89, 0x43, 0x0f, 0xac, 0x96, 0xbf, 0x6c, 0x74, 0xad, 0xe5, 0xbd, 0x9e, 0x65, 0xef, 0x2f,
- 0x1f, 0xad, 0x2c, 0xb7, 0x89, 0x43, 0x3c, 0xc3, 0x27, 0xfb, 0xf5, 0xae, 0xe7, 0xfa, 0x2e, 0x7a,
- 0x33, 0x52, 0xa9, 0x87, 0x2a, 0x75, 0xa3, 0x6b, 0xd5, 0xb9, 0x4a, 0xfd, 0x68, 0x65, 0xf1, 0x3d,
- 0x05, 0xb5, 0xed, 0xb6, 0xdd, 0x65, 0xae, 0xb9, 0xd7, 0x6b, 0xf1, 0x5f, 0xfc, 0x07, 0xff, 0x4b,
- 0x20, 0x2e, 0xea, 0x87, 0xb7, 0x68, 0xdd, 0x72, 0xb9, 0x59, 0xd3, 0xf5, 0x48, 0x8a, 0xd5, 0xc5,
- 0xeb, 0x91, 0x4c, 0xc7, 0x30, 0x0f, 0x2c, 0x87, 0x78, 0xc7, 0xcb, 0xdd, 0xc3, 0x36, 0x23, 0xd0,
- 0xe5, 0x0e, 0xf1, 0x8d, 0x34, 0xad, 0xe5, 0x51, 0x5a, 0x5e, 0xcf, 0xf1, 0xad, 0x0e, 0x19, 0x52,
- 0xb8, 0xf9, 0x32, 0x05, 0x6a, 0x1e, 0x90, 0x8e, 0x31, 0xa4, 0xf7, 0xfe, 0x28, 0xbd, 0x9e, 0x6f,
- 0xd9, 0xcb, 0x96, 0xe3, 0x53, 0xdf, 0x4b, 0x2a, 0xe9, 0xff, 0x54, 0x80, 0x4b, 0x0d, 0xcb, 0x31,
- 0xbc, 0xe3, 0x06, 0x5b, 0x39, 0x4c, 0x9e, 0xf5, 0x08, 0xf5, 0x1f, 0x76, 0x7d, 0xcb, 0x75, 0x28,
- 0xfa, 0x23, 0x28, 0xb1, 0x69, 0xed, 0x1b, 0xbe, 0x51, 0xd5, 0xae, 0x68, 0x57, 0x2b, 0xab, 0xdf,
- 0xae, 0x0b, 0x2b, 0x75, 0xd5, 0x4a, 0xbd, 0x7b, 0xd8, 0x66, 0x04, 0x5a, 0x67, 0xd2, 0xf5, 0xa3,
- 0x95, 0xfa, 0xc3, 0xbd, 0x0f, 0x89, 0xe9, 0xdf, 0x27, 0xbe, 0xd1, 0x40, 0xcf, 0xfb, 0xb5, 0x73,
- 0x83, 0x7e, 0x0d, 0x22, 0x1a, 0x0e, 0x51, 0xd1, 0xb7, 0xa0, 0x68, 0xd0, 0xdb, 0x96, 0x4d, 0xaa,
- 0xb9, 0x2b, 0xda, 0xd5, 0x72, 0x63, 0x5a, 0x4a, 0x17, 0xd7, 0x38, 0x15, 0x4b, 0x2e, 0xba, 0x09,
- 0xd3, 0x1e, 0x39, 0xb2, 0xa8, 0xe5, 0x3a, 0xeb, 0x6e, 0xa7, 0x63, 0xf9, 0xd5, 0x7c, 0x5c, 0x5e,
- 0x50, 0x71, 0x42, 0x0a, 0x7d, 0x07, 0x66, 0x02, 0xca, 0x7d, 0x42, 0xa9, 0xd1, 0x26, 0xd5, 0x02,
- 0x57, 0x9c, 0x91, 0x8a, 0x93, 0x92, 0x8c, 0x93, 0x72, 0xa8, 0x01, 0x28, 0x20, 0xad, 0xf5, 0xfc,
- 0x03, 0xd7, 0x7b, 0x60, 0x74, 0x48, 0x75, 0x82, 0x6b, 0x87, 0x93, 0x8a, 0x38, 0x38, 0x45, 0x1a,
- 0x6d, 0xc2, 0x7c, 0x9c, 0xba, 0xd9, 0x31, 0x2c, 0xbb, 0x5a, 0xe4, 0x20, 0xf3, 0x12, 0xa4, 0xa2,
- 0xb0, 0x70, 0x9a, 0x3c, 0xfa, 0x01, 0x5c, 0x88, 0xcf, 0xcb, 0x27, 0x62, 0x34, 0x93, 0x1c, 0xe8,
- 0x82, 0x04, 0x3a, 0x1f, 0x63, 0xe2, 0x74, 0x1d, 0xf4, 0x00, 0xde, 0x18, 0x62, 0x88, 0x61, 0x95,
- 0x38, 0xda, 0x1b, 0x12, 0x6d, 0x3a, 0xce, 0xc5, 0x23, 0xb4, 0xf4, 0xef, 0xc1, 0x9c, 0xe2, 0x41,
- 0x4d, 0xb7, 0xe7, 0x99, 0x44, 0xf9, 0xae, 0xda, 0x49, 0xdf, 0x55, 0xff, 0xb9, 0x06, 0x17, 0x1a,
- 0x96, 0xbf, 0xd7, 0x33, 0x0f, 0x89, 0xff, 0x98, 0xec, 0xdd, 0x75, 0xdd, 0xc3, 0x75, 0xa3, 0x47,
- 0x09, 0x7a, 0x06, 0x60, 0xba, 0x9d, 0x8e, 0xeb, 0x34, 0xbb, 0xc4, 0x94, 0xde, 0x77, 0xa3, 0xfe,
- 0xd2, 0x8d, 0x5f, 0x5f, 0xe7, 0x4a, 0x2a, 0x54, 0x63, 0x51, 0x1a, 0x47, 0xc3, 0x3c, 0xac, 0x18,
- 0xd1, 0x7f, 0x99, 0x83, 0x09, 0x3e, 0x89, 0xd7, 0xe0, 0xf8, 0x0f, 0xa0, 0x40, 0xd9, 0xc4, 0x72,
- 0x1c, 0xfd, 0x5a, 0x86, 0x89, 0x89, 0xe5, 0xed, 0x12, 0xb3, 0x31, 0x25, 0x91, 0x0b, 0xec, 0x17,
- 0xe6, 0x38, 0xe8, 0x11, 0x14, 0xa9, 0x6f, 0xf8, 0x3d, 0xca, 0x37, 0x46, 0x65, 0xb5, 0x9e, 0x19,
- 0x91, 0x6b, 0x45, 0x1f, 0x48, 0xfc, 0xc6, 0x12, 0x4d, 0xff, 0x9b, 0x1c, 0x54, 0xb8, 0xdc, 0xba,
- 0xeb, 0xb4, 0xac, 0xf6, 0x6b, 0x58, 0x99, 0xdd, 0xd8, 0xca, 0xac, 0x66, 0x9d, 0x87, 0x18, 0xdf,
- 0xc8, 0xf5, 0x79, 0x9a, 0x58, 0x9f, 0xeb, 0x63, 0xe2, 0x9e, 0xbc, 0x4a, 0xbf, 0xd5, 0x60, 0x46,
- 0x91, 0xde, 0xb6, 0xa8, 0x8f, 0x9e, 0x0e, 0xad, 0x54, 0x3d, 0xdb, 0x4a, 0x31, 0x6d, 0xbe, 0x4e,
- 0xb3, 0xd2, 0x5a, 0x29, 0xa0, 0x28, 0xab, 0xd4, 0x84, 0x09, 0xcb, 0x27, 0x1d, 0x5a, 0xcd, 0x5d,
- 0xc9, 0x8f, 0xf3, 0xb9, 0xc5, 0x00, 0x1b, 0xe7, 0x25, 0xf4, 0xc4, 0x16, 0x03, 0xc1, 0x02, 0x4b,
- 0xff, 0x22, 0x1f, 0x9b, 0x06, 0x5b, 0x3e, 0x64, 0x42, 0xc9, 0xf7, 0xac, 0x76, 0x9b, 0x78, 0xb4,
- 0xaa, 0x71, 0x5b, 0x37, 0xb2, 0xda, 0xda, 0x15, 0x7a, 0x3b, 0xae, 0x6d, 0x99, 0xc7, 0xd1, 0x6c,
- 0x24, 0x99, 0xe2, 0x10, 0x18, 0xad, 0x41, 0xd9, 0xeb, 0x39, 0x42, 0x50, 0x46, 0x82, 0xb7, 0xa4,
- 0x78, 0x19, 0x07, 0x8c, 0x17, 0xfd, 0xda, 0xb4, 0x88, 0x52, 0x01, 0x05, 0x47, 0x5a, 0xc8, 0x88,
- 0x9d, 0x17, 0xe2, 0x23, 0xbf, 0x97, 0xf9, 0xbc, 0xe0, 0x7e, 0x13, 0xfa, 0x65, 0x44, 0x53, 0xcf,
- 0x07, 0xb4, 0x0f, 0x97, 0x69, 0xcf, 0x34, 0x09, 0xa5, 0xad, 0x9e, 0xcd, 0x47, 0x42, 0xef, 0x5a,
- 0xd4, 0x77, 0xbd, 0xe3, 0x6d, 0x8b, 0x85, 0x24, 0x16, 0x59, 0x26, 0x1a, 0x57, 0x06, 0xfd, 0xda,
- 0xe5, 0xe6, 0x09, 0x72, 0xf8, 0x44, 0x14, 0xf4, 0x04, 0xaa, 0x2d, 0xc3, 0xb2, 0xc9, 0x7e, 0x8a,
- 0x85, 0x09, 0x6e, 0xe1, 0xf2, 0xa0, 0x5f, 0xab, 0xde, 0x1e, 0x21, 0x83, 0x47, 0x6a, 0xeb, 0xf7,
- 0x60, 0x6e, 0xc8, 0xa5, 0xd1, 0x0d, 0xa8, 0xd8, 0x06, 0xf5, 0x1f, 0x11, 0x8f, 0x9d, 0xed, 0xdc,
- 0x53, 0xf3, 0x51, 0x68, 0xda, 0x8e, 0x58, 0x58, 0x95, 0xd3, 0x7f, 0xad, 0x41, 0x99, 0x83, 0xbd,
- 0x06, 0x5f, 0xbf, 0x1f, 0xf7, 0xf5, 0xab, 0x59, 0xfd, 0x6f, 0x84, 0x97, 0x03, 0x94, 0xc4, 0xc8,
- 0xdd, 0xb6, 0xfe, 0x69, 0x41, 0x7a, 0xfc, 0xb6, 0xdb, 0x0e, 0xb2, 0x9e, 0x65, 0x28, 0x9b, 0xae,
- 0xe3, 0x1b, 0x6c, 0xc8, 0x32, 0x7c, 0xcd, 0x05, 0xce, 0xb8, 0x1e, 0x30, 0x70, 0x24, 0xc3, 0x82,
- 0x5d, 0xcb, 0xb5, 0x6d, 0xf7, 0x23, 0xee, 0xba, 0xa5, 0xe8, 0x94, 0xb8, 0xcd, 0xa9, 0x58, 0x72,
- 0xd1, 0x35, 0x28, 0x75, 0x59, 0x10, 0x75, 0xe5, 0x29, 0x54, 0x8a, 0x66, 0xbd, 0x23, 0xe9, 0x38,
- 0x94, 0x40, 0xd7, 0x61, 0x8a, 0x5a, 0x8e, 0x49, 0x9a, 0xc4, 0x74, 0x9d, 0x7d, 0xca, 0xbd, 0x2b,
- 0xdf, 0x98, 0x1d, 0xf4, 0x6b, 0x53, 0x4d, 0x85, 0x8e, 0x63, 0x52, 0xe8, 0x31, 0x94, 0xf9, 0xef,
- 0x5d, 0x4b, 0x26, 0x2b, 0x95, 0xd5, 0xdf, 0xc9, 0xf6, 0x29, 0x98, 0x46, 0xe3, 0x3c, 0x9b, 0x64,
- 0x33, 0x00, 0xc0, 0x11, 0x16, 0x5a, 0x05, 0x60, 0xd9, 0x27, 0xf5, 0x8d, 0x4e, 0x97, 0xf2, 0x0c,
- 0xa6, 0x14, 0x6d, 0x98, 0xdd, 0x90, 0x83, 0x15, 0x29, 0xf4, 0x2e, 0x94, 0x7d, 0xc3, 0xb2, 0xb7,
- 0x2d, 0x87, 0x50, 0x9e, 0xab, 0xe4, 0x85, 0x81, 0xdd, 0x80, 0x88, 0x23, 0x3e, 0xaa, 0x03, 0xd8,
- 0xcc, 0x4d, 0x1b, 0xc7, 0x3e, 0xa1, 0x3c, 0x17, 0xc9, 0x37, 0xa6, 0x19, 0xf8, 0x76, 0x48, 0xc5,
- 0x8a, 0x04, 0x5b, 0x75, 0xc7, 0xfd, 0xc8, 0xb0, 0xfc, 0x6a, 0x39, 0xbe, 0xea, 0x0f, 0xdc, 0xc7,
- 0x86, 0xe5, 0x63, 0xc9, 0x45, 0x6f, 0xc3, 0xe4, 0x91, 0x74, 0x6e, 0xe0, 0xa0, 0x15, 0x96, 0xf6,
- 0x05, 0x4e, 0x1d, 0xf0, 0xf4, 0x9f, 0x07, 0x81, 0xee, 0x61, 0xcf, 0xef, 0xf6, 0x7c, 0xf4, 0x3d,
- 0xc8, 0xf9, 0xae, 0x74, 0xe6, 0xb7, 0x94, 0x15, 0xac, 0xb3, 0xf2, 0x20, 0x0a, 0x68, 0x98, 0xb4,
- 0x88, 0x47, 0x1c, 0x93, 0x34, 0x8a, 0x83, 0x7e, 0x2d, 0xb7, 0xeb, 0xe2, 0x9c, 0xef, 0xa2, 0x27,
- 0x00, 0xdd, 0x1e, 0x3d, 0x68, 0x12, 0xd3, 0x23, 0xbe, 0x8c, 0x64, 0x57, 0xd3, 0x40, 0xb6, 0x5d,
- 0xd3, 0xb0, 0x93, 0x48, 0x7c, 0xd6, 0x3b, 0xa1, 0x3e, 0x56, 0xb0, 0xd0, 0x3e, 0x54, 0xac, 0x8e,
- 0xd1, 0x26, 0xdb, 0xc6, 0x1e, 0xb1, 0x99, 0x1b, 0xe5, 0x33, 0x9e, 0x73, 0x5b, 0xa1, 0x56, 0xb4,
- 0xbb, 0x23, 0x1a, 0xc5, 0x2a, 0xac, 0xfe, 0x27, 0x1a, 0xcc, 0xf3, 0xc5, 0xd8, 0x71, 0xa9, 0x2f,
- 0xf2, 0x3d, 0x7e, 0x02, 0xbe, 0x0d, 0x93, 0xec, 0x3c, 0x34, 0x9c, 0x7d, 0x1e, 0x0b, 0xca, 0x62,
- 0x2d, 0xd7, 0x05, 0x09, 0x07, 0x3c, 0x74, 0x19, 0x0a, 0x86, 0xd7, 0x16, 0xfb, 0xb5, 0xdc, 0x28,
- 0xb1, 0x50, 0xbc, 0xe6, 0xb5, 0x29, 0xe6, 0x54, 0xf6, 0xe1, 0xa8, 0xe9, 0x59, 0xdd, 0xa1, 0x1c,
- 0xbe, 0xc9, 0xa9, 0x58, 0x72, 0xf5, 0xdf, 0x4e, 0xc2, 0x94, 0x5a, 0x95, 0xbc, 0x86, 0xdc, 0xe3,
- 0x03, 0x28, 0x05, 0x59, 0xae, 0xfc, 0x6a, 0x2b, 0x19, 0x96, 0x56, 0xe4, 0xbc, 0x58, 0x2a, 0x36,
- 0xa6, 0xd8, 0x86, 0x0e, 0x7e, 0xe1, 0x10, 0x10, 0x11, 0x98, 0x95, 0x01, 0x8f, 0xec, 0x37, 0x8e,
- 0xf9, 0xda, 0xcb, 0x38, 0x95, 0xc9, 0xbf, 0x16, 0x06, 0xfd, 0xda, 0xec, 0x6e, 0x02, 0x00, 0x0f,
- 0x41, 0xa2, 0x35, 0x28, 0xb4, 0x3c, 0xb7, 0xc3, 0xcf, 0x8b, 0x8c, 0xd0, 0xfc, 0x0b, 0xdd, 0xf6,
- 0xdc, 0x0e, 0xe6, 0xaa, 0xe8, 0x09, 0x14, 0xf7, 0x78, 0x4a, 0x2f, 0x4f, 0x90, 0x4c, 0xc9, 0x52,
- 0xb2, 0x06, 0x68, 0x00, 0xfb, 0xa6, 0x82, 0x8c, 0x25, 0x1e, 0x5a, 0x89, 0x47, 0x9b, 0x22, 0xdf,
- 0x90, 0x33, 0x27, 0x45, 0x1a, 0xf4, 0x1d, 0xc8, 0x13, 0xe7, 0xa8, 0x3a, 0xc9, 0x3d, 0x7d, 0x31,
- 0x6d, 0x3a, 0x9b, 0xce, 0xd1, 0x23, 0xc3, 0x6b, 0x54, 0xe4, 0xa7, 0xcd, 0x6f, 0x3a, 0x47, 0x98,
- 0xe9, 0xa0, 0x43, 0xa8, 0x28, 0xcb, 0x53, 0x2d, 0x71, 0x88, 0xeb, 0x63, 0xa6, 0x2f, 0xa2, 0x86,
- 0x08, 0xf7, 0x8c, 0xf2, 0x05, 0xb0, 0x8a, 0x8e, 0x7e, 0xa6, 0xc1, 0x85, 0x7d, 0xd7, 0x3c, 0x24,
- 0x5e, 0xd3, 0x67, 0x25, 0x76, 0xfb, 0x58, 0x06, 0x14, 0x7e, 0x3e, 0x55, 0x56, 0x6f, 0x65, 0xb0,
- 0xbb, 0x91, 0xa6, 0xdf, 0xb8, 0x34, 0xe8, 0xd7, 0x2e, 0xa4, 0xb2, 0x70, 0xba, 0x45, 0x3e, 0x16,
- 0xca, 0xbf, 0x42, 0x72, 0x2c, 0x90, 0x79, 0x2c, 0xcd, 0x34, 0x7d, 0x31, 0x96, 0x54, 0x16, 0x4e,
- 0xb7, 0xa8, 0xff, 0xdb, 0x84, 0x3c, 0x58, 0x65, 0x69, 0xf8, 0x3e, 0x14, 0xfc, 0xe3, 0x6e, 0x50,
- 0x18, 0xd6, 0x82, 0x5c, 0x7d, 0xf7, 0xb8, 0x4b, 0x5e, 0xf4, 0x6b, 0x33, 0x8a, 0x28, 0x23, 0x61,
- 0x2e, 0xac, 0x78, 0x64, 0xee, 0x15, 0x7b, 0x64, 0x1d, 0x40, 0xac, 0x61, 0x8b, 0x55, 0xab, 0xf2,
- 0x44, 0x62, 0x07, 0xc4, 0x46, 0x48, 0xc5, 0x8a, 0x04, 0xda, 0x86, 0x7c, 0x5b, 0xe6, 0x7a, 0xd9,
- 0x4e, 0x87, 0x3b, 0x96, 0xaf, 0x8e, 0x61, 0x92, 0x79, 0xe8, 0x1d, 0xcb, 0xc7, 0x0c, 0x86, 0x95,
- 0x6d, 0xfc, 0xdc, 0xa5, 0xd5, 0x89, 0xcc, 0x79, 0x3c, 0xdf, 0xe6, 0x12, 0x2d, 0x3c, 0x3b, 0x39,
- 0x91, 0x62, 0x89, 0xc6, 0xa2, 0x35, 0xcb, 0x4f, 0xc8, 0xc7, 0xfe, 0x86, 0xe5, 0xc9, 0x7e, 0x83,
- 0x92, 0xde, 0x06, 0x1c, 0xac, 0x48, 0xa1, 0x1f, 0xc3, 0x94, 0xfc, 0x82, 0x22, 0x6c, 0x4d, 0x8e,
- 0x19, 0xb6, 0x44, 0x6a, 0xa2, 0x20, 0xe0, 0x18, 0x1e, 0xfa, 0x43, 0x98, 0xa4, 0xfc, 0x2f, 0x3a,
- 0xc6, 0x4e, 0x14, 0xba, 0xea, 0x02, 0x86, 0x9d, 0x1b, 0xc1, 0xa2, 0x38, 0x40, 0x45, 0x87, 0x7c,
- 0xd2, 0x2d, 0xab, 0x7d, 0xdf, 0xe8, 0xb2, 0x5d, 0xc7, 0x6c, 0xfc, 0x6e, 0xa6, 0x12, 0x40, 0x2a,
- 0xa9, 0x66, 0xd4, 0xd5, 0x92, 0x90, 0x58, 0x81, 0xd7, 0xff, 0x3d, 0x48, 0x80, 0x79, 0x60, 0x34,
- 0x52, 0xba, 0x15, 0xaf, 0xb8, 0xfa, 0x48, 0x1c, 0x66, 0xb9, 0xaf, 0xf2, 0x30, 0xd3, 0x3f, 0x9d,
- 0x0c, 0x36, 0xad, 0xa8, 0x12, 0x56, 0x60, 0xa2, 0x7b, 0x60, 0xd0, 0x60, 0xd7, 0x7e, 0x23, 0x48,
- 0xac, 0x77, 0x18, 0xf1, 0x45, 0xbf, 0x06, 0x22, 0x5b, 0x60, 0xbf, 0xb0, 0x90, 0xe4, 0x69, 0xb4,
- 0xe1, 0x98, 0xc4, 0xb6, 0xc9, 0xbe, 0x4c, 0x8c, 0xa3, 0x34, 0x3a, 0x60, 0xe0, 0x48, 0x06, 0xdd,
- 0x84, 0xa2, 0x47, 0x0c, 0xea, 0x3a, 0x72, 0x17, 0x2e, 0x05, 0xbe, 0x8d, 0x39, 0xf5, 0x05, 0xf3,
- 0x2e, 0x51, 0x76, 0xf3, 0xdf, 0x58, 0x4a, 0xa3, 0x77, 0x60, 0xb2, 0x73, 0x72, 0x6f, 0x2f, 0xe0,
- 0xa3, 0x16, 0x4c, 0x53, 0xdf, 0xf0, 0xfc, 0x30, 0x5f, 0x3d, 0x45, 0x8a, 0x8c, 0x06, 0xfd, 0xda,
- 0x74, 0x33, 0x86, 0x82, 0x13, 0xa8, 0xa8, 0x07, 0xf3, 0xa6, 0xdb, 0xe9, 0xda, 0x84, 0x1d, 0x81,
- 0x91, 0xb1, 0xe2, 0xd8, 0xc6, 0x2e, 0x0e, 0xfa, 0xb5, 0xf9, 0xf5, 0x61, 0x28, 0x9c, 0x86, 0x8f,
- 0x7e, 0x0f, 0x4a, 0xfb, 0x3d, 0xcf, 0x60, 0x44, 0x99, 0x6e, 0xbf, 0x19, 0x14, 0x18, 0x1b, 0x92,
- 0xfe, 0xa2, 0x5f, 0x3b, 0xcf, 0x32, 0xf4, 0x7a, 0x40, 0xc0, 0xa1, 0x0a, 0xda, 0x83, 0x45, 0x97,
- 0x27, 0xbf, 0xe2, 0xe8, 0x13, 0x09, 0x46, 0xb0, 0xbd, 0x65, 0x77, 0x50, 0x97, 0x80, 0x8b, 0x0f,
- 0x47, 0x4a, 0xe2, 0x13, 0x50, 0xd0, 0x1d, 0x28, 0x8a, 0x4d, 0x24, 0xa3, 0x62, 0xa6, 0xfc, 0x04,
- 0x44, 0x87, 0x97, 0xa9, 0x61, 0xa9, 0x8e, 0x9e, 0x42, 0x51, 0x98, 0x91, 0x21, 0xed, 0xfa, 0x78,
- 0x0d, 0x2f, 0x31, 0xfc, 0xe8, 0xfc, 0x14, 0xbf, 0xb1, 0xc4, 0x44, 0xbb, 0xbc, 0x5d, 0xc4, 0xce,
- 0xe5, 0x0a, 0xdf, 0x67, 0x59, 0x1a, 0x74, 0x4d, 0xa6, 0xb0, 0xe5, 0xb4, 0xdc, 0x58, 0x9b, 0x88,
- 0x9f, 0xca, 0x02, 0x8b, 0x9d, 0xca, 0xb6, 0xdb, 0x6e, 0x3a, 0x56, 0xb7, 0x4b, 0xfc, 0xea, 0x54,
- 0xfc, 0x54, 0xde, 0x0e, 0x39, 0x58, 0x91, 0xd2, 0x4d, 0x59, 0xb4, 0xab, 0xc3, 0x46, 0x0f, 0x94,
- 0xe2, 0xe4, 0xe6, 0x69, 0x26, 0xbe, 0xeb, 0xaa, 0xf5, 0x8a, 0xbe, 0x2d, 0xd3, 0xfd, 0xb8, 0x08,
- 0xba, 0x21, 0x8b, 0x8d, 0x0d, 0xab, 0x4d, 0xa8, 0x2f, 0xf7, 0x7e, 0xbc, 0x7a, 0x10, 0x2c, 0xac,
- 0xca, 0xe9, 0xbf, 0x29, 0xc0, 0x79, 0x09, 0x27, 0x52, 0x01, 0x74, 0x23, 0x16, 0xf3, 0xdf, 0x4c,
- 0xc4, 0xfc, 0xb9, 0x98, 0xb0, 0x12, 0xf5, 0x3d, 0x98, 0x8e, 0xe7, 0x37, 0x32, 0xfa, 0xdf, 0xcc,
- 0x9c, 0x4a, 0xc5, 0x90, 0xc5, 0xd6, 0x8d, 0x27, 0x52, 0x38, 0x61, 0x81, 0xd9, 0x8c, 0xe7, 0x31,
- 0x32, 0x47, 0xbf, 0x99, 0x39, 0x65, 0x4a, 0xb1, 0x19, 0x4f, 0x98, 0x70, 0xc2, 0x02, 0xb3, 0x69,
- 0xf6, 0xa8, 0xef, 0x76, 0x42, 0x9b, 0x85, 0xcc, 0x36, 0xd7, 0xb9, 0x62, 0x8a, 0xcd, 0xf5, 0x18,
- 0x22, 0x4e, 0x58, 0x40, 0xbf, 0xd2, 0xe0, 0xe2, 0x87, 0xc4, 0x39, 0xb4, 0x1c, 0xba, 0x63, 0x75,
- 0x89, 0x6d, 0x39, 0xd1, 0x8c, 0xc5, 0xa1, 0xf8, 0xfb, 0x19, 0xac, 0xdf, 0x8b, 0x23, 0xc4, 0x87,
- 0xf1, 0x8d, 0x41, 0xbf, 0x76, 0xf1, 0x5e, 0xba, 0x0d, 0x3c, 0xca, 0xb8, 0xfe, 0xd3, 0x09, 0xe9,
- 0xf1, 0x6a, 0xc8, 0x52, 0x0f, 0x79, 0xed, 0x25, 0x87, 0xbc, 0x07, 0xd3, 0xfc, 0x9a, 0xcb, 0x32,
- 0x65, 0xa7, 0x7f, 0x0c, 0xaf, 0xb9, 0x13, 0x53, 0x14, 0xd1, 0x92, 0xaf, 0x66, 0x9c, 0x81, 0x13,
- 0x16, 0x90, 0x03, 0xe7, 0x05, 0x78, 0x60, 0x32, 0x9f, 0xf9, 0xc2, 0xe2, 0x8e, 0xe5, 0xdf, 0x0d,
- 0xf5, 0x84, 0xc5, 0xb9, 0x41, 0xbf, 0x76, 0x3e, 0x46, 0xc7, 0x71, 0x78, 0xd4, 0x83, 0x59, 0xbe,
- 0xe3, 0xd6, 0x0f, 0x0c, 0xa7, 0x2d, 0x96, 0x5d, 0xfa, 0xcc, 0xfb, 0x59, 0x33, 0x48, 0xa1, 0x2a,
- 0x0c, 0xf2, 0xda, 0x72, 0x2b, 0x01, 0x88, 0x87, 0x4c, 0xc8, 0x69, 0xda, 0x46, 0x38, 0xcd, 0x89,
- 0x71, 0xa6, 0xb9, 0x6d, 0xa4, 0x4f, 0x33, 0xa2, 0xe3, 0x38, 0x3c, 0xfa, 0x09, 0xcc, 0xee, 0x25,
- 0x6e, 0x87, 0x64, 0x10, 0xbd, 0x95, 0xa9, 0x00, 0x48, 0xb9, 0x58, 0x12, 0x73, 0x4d, 0xb2, 0xf0,
- 0x90, 0x1d, 0xfd, 0xd7, 0x05, 0x40, 0xc3, 0x6d, 0x6c, 0x74, 0x3d, 0x76, 0x94, 0x5d, 0x49, 0x1c,
- 0x65, 0xb3, 0xaa, 0x86, 0x72, 0x92, 0x3d, 0x85, 0xa2, 0x18, 0xef, 0x18, 0x6d, 0x05, 0x39, 0x10,
- 0x09, 0x96, 0xe6, 0x14, 0x12, 0x93, 0x65, 0xd6, 0xd2, 0x1f, 0xa5, 0xdf, 0x9d, 0x02, 0x3e, 0xcd,
- 0xcb, 0x03, 0x54, 0x74, 0x20, 0x03, 0x81, 0xf0, 0x05, 0xe9, 0x69, 0x37, 0xc6, 0xf3, 0xb4, 0xc0,
- 0xd0, 0x4c, 0x18, 0x3b, 0x04, 0x1d, 0xab, 0xd0, 0x72, 0xa1, 0x6c, 0x63, 0x4f, 0xba, 0xd6, 0x19,
- 0x16, 0x4a, 0x71, 0x2b, 0x89, 0x89, 0x08, 0x94, 0xc3, 0xef, 0x2c, 0x1d, 0xe9, 0x14, 0x06, 0xd2,
- 0x3d, 0x28, 0x42, 0xd6, 0xff, 0xa5, 0x08, 0x4a, 0x16, 0x8f, 0xbe, 0x0f, 0xd3, 0x94, 0x78, 0x47,
- 0x96, 0x49, 0xd6, 0x4c, 0xd3, 0xed, 0x39, 0x41, 0x24, 0x0d, 0x6f, 0x5a, 0x9b, 0x31, 0x2e, 0x4e,
- 0x48, 0xf3, 0xbb, 0x3d, 0x1e, 0x30, 0xa4, 0xf3, 0x64, 0xbf, 0xdb, 0x4b, 0x14, 0x89, 0xb2, 0x51,
- 0x25, 0xd1, 0x62, 0xdd, 0xae, 0xfc, 0xab, 0xee, 0x76, 0xfd, 0x18, 0x4a, 0x34, 0x1e, 0xcd, 0xbe,
- 0x9d, 0x3d, 0x51, 0x91, 0x01, 0x24, 0x6c, 0x8f, 0x87, 0x51, 0x23, 0xc4, 0x64, 0x8b, 0x22, 0xf3,
- 0xbf, 0x89, 0xf1, 0x16, 0xe5, 0x25, 0x99, 0xdf, 0x1f, 0x40, 0xd9, 0x23, 0x62, 0x81, 0xa8, 0x74,
- 0x91, 0xd4, 0x12, 0x18, 0x4b, 0x21, 0x4c, 0x9e, 0xf5, 0x2c, 0x8f, 0x74, 0x88, 0xe3, 0xd3, 0xa8,
- 0xc0, 0x09, 0xb8, 0x14, 0x47, 0x68, 0xe8, 0x43, 0x80, 0x6e, 0xd8, 0x4f, 0x95, 0xe5, 0x75, 0xe6,
- 0xec, 0x2d, 0xde, 0x89, 0x8d, 0xd2, 0xc6, 0x88, 0x8e, 0x15, 0x74, 0xf4, 0x01, 0x5c, 0x8a, 0x2a,
- 0x84, 0x0d, 0x62, 0xec, 0xf3, 0x18, 0x2b, 0xaf, 0x12, 0x44, 0x73, 0xfd, 0x9b, 0x83, 0x7e, 0xed,
- 0xd2, 0xfa, 0x28, 0x21, 0x3c, 0x5a, 0x1f, 0x75, 0x60, 0xca, 0x71, 0xf7, 0x49, 0x93, 0xd8, 0xc4,
- 0xf4, 0x5d, 0x4f, 0xa6, 0xf2, 0x59, 0x4a, 0x6d, 0xd1, 0x14, 0x32, 0xec, 0x07, 0x8a, 0xba, 0x68,
- 0x1c, 0xa8, 0x14, 0x1c, 0x83, 0xd7, 0xff, 0x5c, 0x83, 0x94, 0xab, 0xfb, 0x98, 0xfb, 0x6a, 0xaf,
- 0xda, 0x7d, 0xbf, 0x05, 0x45, 0x1a, 0x75, 0xef, 0xd5, 0x26, 0xb5, 0x68, 0x6d, 0x48, 0xae, 0xfe,
- 0x0f, 0x1a, 0x2c, 0xa4, 0xf5, 0x0f, 0x98, 0x1f, 0x85, 0xdd, 0x02, 0x39, 0xbc, 0xec, 0xad, 0x14,
- 0xf5, 0xbe, 0x49, 0x40, 0xe0, 0x08, 0x8d, 0x9d, 0x27, 0xfb, 0x84, 0xfa, 0x96, 0xc3, 0xcb, 0xb6,
- 0x0d, 0xcb, 0x93, 0x63, 0x0c, 0xcf, 0x93, 0x8d, 0x18, 0x17, 0x27, 0xa4, 0xf5, 0x5f, 0x14, 0x60,
- 0x3e, 0x25, 0x6d, 0x44, 0x9b, 0xb2, 0x73, 0x3c, 0xc6, 0xa5, 0x47, 0x78, 0xd5, 0x1e, 0xeb, 0x1e,
- 0x43, 0xb7, 0x67, 0xdb, 0x67, 0xbb, 0xfc, 0x08, 0xf4, 0xb1, 0x82, 0x15, 0xb4, 0x82, 0xf3, 0xa7,
- 0x68, 0x05, 0xdf, 0x03, 0x44, 0x3e, 0xee, 0xba, 0x94, 0xc8, 0xf4, 0xdf, 0xe5, 0x21, 0xa0, 0xc0,
- 0xdb, 0x12, 0xe1, 0xfb, 0x90, 0xcd, 0x21, 0x09, 0x9c, 0xa2, 0x85, 0x96, 0xa1, 0xdc, 0x72, 0x3d,
- 0x93, 0xb0, 0x51, 0xf2, 0xd3, 0x47, 0xe9, 0x6c, 0xdc, 0x0e, 0x18, 0x38, 0x92, 0x41, 0x4f, 0xa2,
- 0xce, 0x57, 0x31, 0xf3, 0x85, 0x8d, 0x98, 0x33, 0xdf, 0xec, 0xa3, 0x5b, 0x5e, 0x6b, 0x30, 0xc3,
- 0x15, 0xd6, 0x76, 0xb6, 0x82, 0x9e, 0xba, 0x78, 0x13, 0x74, 0x51, 0xaa, 0x88, 0x7e, 0x6a, 0xc4,
- 0xc6, 0x49, 0x79, 0xfd, 0x79, 0x01, 0xe6, 0x53, 0x8a, 0xa5, 0xf0, 0x1e, 0x41, 0x3b, 0xcb, 0x3d,
- 0xc2, 0x57, 0xe5, 0x09, 0xef, 0xc0, 0xa4, 0xe3, 0xae, 0x1b, 0xe6, 0x01, 0x91, 0x37, 0xa9, 0xe1,
- 0x12, 0x3d, 0x10, 0x64, 0x1c, 0xf0, 0x03, 0xa7, 0x29, 0x9c, 0xc2, 0x69, 0xc6, 0xfe, 0xd0, 0xdf,
- 0x0f, 0x0a, 0xd6, 0x96, 0x65, 0x93, 0x1d, 0xc3, 0x3f, 0x90, 0xad, 0xd7, 0x68, 0x67, 0xc6, 0xb8,
- 0x38, 0x21, 0x8d, 0x7e, 0x00, 0x65, 0xf1, 0x79, 0xbc, 0x36, 0xcd, 0x70, 0xe3, 0x11, 0x0e, 0xa6,
- 0x11, 0x28, 0xe1, 0x48, 0x1f, 0x75, 0xe1, 0x22, 0xcf, 0xac, 0xd8, 0x99, 0xdb, 0xb1, 0x7e, 0xc2,
- 0xb7, 0xbf, 0x7c, 0x62, 0x21, 0x7a, 0x39, 0x37, 0x59, 0x7d, 0xb6, 0x95, 0x2e, 0xf2, 0x62, 0x34,
- 0x0b, 0x8f, 0x82, 0xd5, 0x7f, 0xa1, 0x41, 0xfa, 0x3d, 0x45, 0x7c, 0x62, 0xda, 0x19, 0x27, 0xf6,
- 0x76, 0xf4, 0xf1, 0x45, 0x5f, 0xb1, 0x92, 0xf6, 0xe1, 0xf5, 0xbf, 0xd0, 0x60, 0x3e, 0xa5, 0x9e,
- 0xfb, 0x7a, 0xc4, 0x8d, 0xcf, 0x73, 0xc9, 0xc1, 0x6d, 0x1e, 0x11, 0xc7, 0x3f, 0xdd, 0xed, 0xc8,
- 0xa6, 0xb8, 0x93, 0xc8, 0xc9, 0xf6, 0x62, 0xa6, 0x62, 0x8c, 0x37, 0xaa, 0xe2, 0x97, 0x11, 0x67,
- 0x38, 0x5e, 0x47, 0x5f, 0x7e, 0x15, 0x5e, 0xf7, 0xe5, 0x97, 0xfe, 0x8f, 0x1a, 0x4c, 0xc7, 0x2f,
- 0x5d, 0xd0, 0x37, 0x21, 0xdf, 0xf3, 0x2c, 0xb9, 0xa8, 0xe1, 0xe8, 0x7f, 0x88, 0xb7, 0x30, 0xa3,
- 0x33, 0xb6, 0x47, 0x5a, 0xf2, 0x8b, 0x85, 0x6c, 0x4c, 0x5a, 0x98, 0xd1, 0x11, 0x81, 0x4a, 0xd7,
- 0x73, 0x3f, 0x3e, 0x16, 0xc1, 0x78, 0x8c, 0x07, 0x76, 0x3b, 0x91, 0x56, 0xd4, 0x36, 0x53, 0x88,
- 0x58, 0xc5, 0xe5, 0x69, 0xce, 0x70, 0x33, 0xe0, 0xeb, 0xe1, 0xae, 0xff, 0xad, 0xc1, 0xa4, 0x74,
- 0x1a, 0xf4, 0x0c, 0xa6, 0xdb, 0xb1, 0xe5, 0x1d, 0x63, 0x58, 0x89, 0xcb, 0xb0, 0xf0, 0x5c, 0x8c,
- 0xd3, 0x71, 0xc2, 0x00, 0xfa, 0x63, 0x98, 0x6b, 0x5b, 0x7e, 0x7c, 0x4e, 0x63, 0xdc, 0x04, 0xde,
- 0x49, 0xea, 0x36, 0x2e, 0x49, 0xc3, 0x73, 0x43, 0x2c, 0x3c, 0x6c, 0x49, 0xff, 0x54, 0x7c, 0x99,
- 0x44, 0xff, 0xe2, 0x7f, 0xe3, 0x89, 0xea, 0xdf, 0xe7, 0x60, 0x78, 0xc8, 0xec, 0x2b, 0x9a, 0xa2,
- 0xa8, 0xd0, 0x52, 0x5f, 0x45, 0x4b, 0x2e, 0x2b, 0xae, 0x0d, 0xfe, 0xac, 0x78, 0x8c, 0xb5, 0x13,
- 0xa6, 0xd6, 0x5d, 0xc7, 0xf7, 0x5c, 0xfb, 0x87, 0x94, 0x78, 0xca, 0x5b, 0x5e, 0x8e, 0x85, 0x25,
- 0x26, 0x2b, 0xae, 0xcd, 0xe0, 0x69, 0xf0, 0x18, 0xaf, 0x2c, 0x87, 0x0d, 0x28, 0xd9, 0xaf, 0x84,
- 0xc3, 0x11, 0xf2, 0x18, 0xd7, 0x3d, 0xfa, 0xcf, 0x34, 0x98, 0x4d, 0xf6, 0xba, 0x98, 0x3e, 0x8f,
- 0x5f, 0x5b, 0x1b, 0xc9, 0x4e, 0xe2, 0x96, 0x20, 0xe3, 0x80, 0x8f, 0xee, 0xc1, 0x24, 0xcb, 0x63,
- 0xb0, 0x3c, 0x1b, 0x32, 0x66, 0x41, 0x3c, 0x1a, 0xdd, 0x16, 0x7a, 0x38, 0x00, 0xd0, 0xff, 0x4e,
- 0x03, 0x34, 0xdc, 0x0d, 0x41, 0x3b, 0xb0, 0x60, 0x1b, 0xd4, 0x0f, 0x2f, 0xe2, 0xb6, 0x62, 0x43,
- 0xbb, 0x2c, 0x87, 0xb6, 0xb0, 0x9d, 0x22, 0x83, 0x53, 0x35, 0xc3, 0xbc, 0x2d, 0x77, 0xea, 0xbc,
- 0x4d, 0x6f, 0x02, 0x44, 0x4f, 0x83, 0xd0, 0x15, 0x28, 0x38, 0x46, 0x27, 0x08, 0x49, 0x61, 0xc6,
- 0xcf, 0xdf, 0x98, 0x73, 0x0e, 0x7a, 0x0b, 0x26, 0x8e, 0x0c, 0xbb, 0x17, 0x3c, 0xe2, 0x0f, 0x9f,
- 0xdd, 0x3d, 0x62, 0x44, 0x2c, 0x78, 0xfa, 0x5f, 0xe6, 0xa0, 0xa2, 0x5c, 0x5d, 0xbf, 0xaa, 0x6a,
- 0xe3, 0x31, 0x4c, 0x74, 0x0d, 0xff, 0x20, 0x78, 0x1c, 0xb8, 0x3a, 0xde, 0x05, 0x3a, 0xcb, 0xba,
- 0xa2, 0xf1, 0xb2, 0x5f, 0x14, 0x0b, 0xbc, 0x44, 0xf2, 0x9a, 0x7f, 0x85, 0xc9, 0xeb, 0x1b, 0x90,
- 0x33, 0x28, 0x4f, 0x48, 0xcb, 0xe2, 0x16, 0x66, 0x8d, 0xe2, 0x9c, 0x41, 0xf5, 0x9f, 0x6a, 0x30,
- 0x93, 0x18, 0x1b, 0x5a, 0x05, 0xa0, 0xe1, 0x2f, 0xf9, 0x09, 0xc2, 0xda, 0x3f, 0x92, 0xc3, 0x8a,
- 0xd4, 0x99, 0xeb, 0xc3, 0x7f, 0xd5, 0xe0, 0xf2, 0x49, 0x8d, 0x7d, 0x56, 0x75, 0xc8, 0xee, 0x7d,
- 0x98, 0xe7, 0x6a, 0xf1, 0xaa, 0xe3, 0x5e, 0x9c, 0x8d, 0x93, 0xf2, 0xe8, 0x06, 0x54, 0x14, 0x92,
- 0x1c, 0x60, 0x18, 0x23, 0x15, 0x75, 0xac, 0xca, 0x9d, 0x21, 0x45, 0xd1, 0xff, 0x59, 0x83, 0x85,
- 0xb4, 0xf6, 0x03, 0x6a, 0x07, 0xcf, 0x4b, 0x45, 0x5e, 0xda, 0x38, 0x65, 0x1b, 0xa3, 0xce, 0x1f,
- 0x99, 0x6e, 0x3a, 0xbe, 0x77, 0x9c, 0xfe, 0xf0, 0x74, 0xf1, 0x16, 0x40, 0x24, 0x83, 0x66, 0x21,
- 0x7f, 0x48, 0x8e, 0xc5, 0xc2, 0x61, 0xf6, 0x27, 0x5a, 0x88, 0x6d, 0x23, 0xb9, 0x6f, 0xbe, 0x9b,
- 0xbb, 0xa5, 0x7d, 0xb7, 0xf4, 0x67, 0x7f, 0x55, 0x3b, 0xf7, 0xc9, 0x17, 0x57, 0xce, 0xe9, 0xbf,
- 0xd4, 0x40, 0xcd, 0x20, 0xd0, 0xbb, 0x50, 0x3e, 0xf0, 0xfd, 0x2e, 0x27, 0xc9, 0x7b, 0x73, 0xfe,
- 0xc4, 0xf2, 0xee, 0xee, 0xee, 0x0e, 0x27, 0xe2, 0x88, 0x8f, 0xea, 0x00, 0xec, 0x07, 0x15, 0xd2,
- 0x85, 0xe8, 0xad, 0x0b, 0x93, 0x6e, 0x0a, 0x71, 0x45, 0x42, 0x24, 0xda, 0x42, 0x58, 0xfc, 0xdf,
- 0x8b, 0x4c, 0xb4, 0x85, 0x64, 0xc0, 0xd3, 0xff, 0x56, 0x83, 0xb9, 0xa1, 0x77, 0x1a, 0x68, 0x27,
- 0x4c, 0x2d, 0xc6, 0xed, 0x7e, 0x8c, 0x48, 0x42, 0xce, 0xec, 0xd7, 0xb7, 0x60, 0x41, 0x20, 0x72,
- 0xab, 0xd1, 0x9d, 0xf4, 0x4b, 0x0f, 0x38, 0xfd, 0xaf, 0x35, 0x80, 0xa8, 0x1e, 0x47, 0x7b, 0x30,
- 0x25, 0x86, 0x14, 0xcb, 0x7f, 0xb2, 0x4f, 0x70, 0x41, 0x9a, 0x98, 0x6a, 0x2a, 0x28, 0x38, 0x86,
- 0xc9, 0x6a, 0xcf, 0x8e, 0xdb, 0x73, 0x7c, 0xbe, 0xbb, 0x72, 0xf1, 0x57, 0xc8, 0xf7, 0x03, 0x06,
- 0x8e, 0x64, 0xf4, 0x4f, 0xf2, 0x30, 0x9f, 0x72, 0x01, 0xf9, 0xff, 0xba, 0xab, 0xf3, 0x0e, 0x4c,
- 0x8a, 0xc7, 0xa2, 0x34, 0x99, 0x0b, 0x88, 0xb7, 0xa4, 0x14, 0x07, 0x7c, 0xb4, 0x02, 0x15, 0xcb,
- 0x31, 0x45, 0xa3, 0xd6, 0x08, 0xaa, 0x79, 0x71, 0x17, 0x11, 0x91, 0xb1, 0x2a, 0x13, 0x2f, 0xff,
- 0x8b, 0x2f, 0x2f, 0xff, 0xf5, 0x1f, 0xc1, 0xdc, 0x50, 0x36, 0x93, 0x2d, 0x7c, 0x12, 0xfe, 0x0f,
- 0x58, 0x89, 0xf0, 0x29, 0xfe, 0xef, 0x4a, 0xf0, 0xf4, 0x5f, 0x69, 0x30, 0x9d, 0x48, 0xfb, 0x4e,
- 0x55, 0x2b, 0x3e, 0x54, 0x6b, 0xc5, 0xd3, 0x25, 0xcf, 0xb1, 0xaa, 0x51, 0xbf, 0x07, 0xe9, 0xef,
- 0x01, 0x93, 0x2b, 0xae, 0xbd, 0x7c, 0xc5, 0xf5, 0xdf, 0xe4, 0xa0, 0x1c, 0x3e, 0xa3, 0x40, 0xef,
- 0xc5, 0x56, 0xee, 0x92, 0xba, 0x72, 0x2f, 0xfa, 0x35, 0x21, 0xa8, 0x2c, 0xe3, 0x07, 0x50, 0x0e,
- 0x9f, 0xe1, 0x84, 0xb5, 0x70, 0xf6, 0xa7, 0x36, 0xe1, 0xa7, 0x0d, 0xdf, 0xf6, 0xe0, 0x08, 0x8f,
- 0xe5, 0x69, 0xc1, 0x3b, 0x99, 0xfb, 0x96, 0x6d, 0x5b, 0x54, 0xb6, 0xd2, 0xf3, 0xbc, 0x95, 0x1e,
- 0xe6, 0x69, 0x1b, 0x29, 0x32, 0x38, 0x55, 0x13, 0xed, 0xc0, 0x04, 0xf5, 0x49, 0x97, 0xca, 0xce,
- 0xd4, 0xbb, 0x99, 0x5e, 0x98, 0x90, 0x2e, 0xaf, 0xdb, 0x43, 0x17, 0x61, 0x14, 0x8a, 0x05, 0x90,
- 0xfe, 0x5f, 0x1a, 0x94, 0x02, 0x11, 0x74, 0x2d, 0xb6, 0x78, 0xd5, 0xc4, 0xe2, 0x71, 0xb9, 0xff,
- 0xb3, 0x6b, 0xa7, 0xf7, 0x35, 0x98, 0x8e, 0x5f, 0xca, 0x29, 0x95, 0xac, 0x76, 0x52, 0x25, 0x8b,
- 0xae, 0x41, 0xc9, 0xb0, 0x6d, 0xf7, 0xa3, 0x4d, 0xe7, 0x48, 0x76, 0x8f, 0xc2, 0x5b, 0xa6, 0x35,
- 0x49, 0xc7, 0xa1, 0x04, 0x3a, 0x82, 0x19, 0xa1, 0x17, 0xbd, 0x83, 0xca, 0x67, 0xbe, 0xec, 0x48,
- 0x0b, 0x36, 0x8d, 0x79, 0x96, 0x1e, 0x35, 0xe3, 0x98, 0x38, 0x69, 0xa4, 0x71, 0xf5, 0xf9, 0x97,
- 0x4b, 0xe7, 0x3e, 0xfb, 0x72, 0xe9, 0xdc, 0xe7, 0x5f, 0x2e, 0x9d, 0xfb, 0x64, 0xb0, 0xa4, 0x3d,
- 0x1f, 0x2c, 0x69, 0x9f, 0x0d, 0x96, 0xb4, 0xcf, 0x07, 0x4b, 0xda, 0x7f, 0x0c, 0x96, 0xb4, 0x3f,
- 0xfd, 0xcf, 0xa5, 0x73, 0x3f, 0xca, 0x1d, 0xad, 0xfc, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf6,
- 0x63, 0xae, 0x93, 0x3f, 0x3d, 0x00, 0x00,
+ // 3895 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x5b, 0xcd, 0x6f, 0x1c, 0x47,
+ 0x76, 0x57, 0xcf, 0x0c, 0x87, 0x33, 0x6f, 0x28, 0x7e, 0x14, 0x25, 0x6b, 0xa4, 0xd5, 0x72, 0xe4,
+ 0x36, 0x6c, 0xc8, 0xb1, 0x3d, 0x5c, 0xd2, 0x92, 0xa2, 0xdd, 0x45, 0x36, 0xe0, 0x90, 0x94, 0x4c,
+ 0x2d, 0x25, 0x11, 0x35, 0x5c, 0xdb, 0x59, 0x1b, 0x9b, 0x34, 0x7b, 0x6a, 0x86, 0x6d, 0xf6, 0x74,
+ 0xb7, 0xbb, 0x7a, 0x68, 0x73, 0x81, 0x00, 0x46, 0x80, 0x85, 0xb1, 0xbb, 0x97, 0xec, 0x65, 0x91,
+ 0xe4, 0x92, 0x04, 0x41, 0x4e, 0x39, 0xe5, 0x10, 0x60, 0x81, 0xbd, 0x04, 0xc8, 0x1e, 0x74, 0xc8,
+ 0x61, 0x83, 0xe4, 0x60, 0x20, 0x8b, 0x41, 0x3c, 0x39, 0xe4, 0x7f, 0x50, 0x2e, 0x41, 0x7d, 0x74,
+ 0x77, 0x75, 0x4f, 0x0f, 0xd5, 0x43, 0x6a, 0x15, 0x27, 0x37, 0xce, 0xfb, 0xf8, 0xbd, 0xaa, 0xea,
+ 0x57, 0xf5, 0x3e, 0xaa, 0x08, 0x6b, 0x3d, 0x2b, 0x38, 0x1c, 0x1c, 0x34, 0x4d, 0xb7, 0xbf, 0xea,
+ 0x7a, 0xc4, 0xa1, 0x87, 0x56, 0x37, 0x58, 0x35, 0x3c, 0x6b, 0xf5, 0x60, 0x60, 0xd9, 0x9d, 0xd5,
+ 0xe3, 0xb5, 0xd5, 0x1e, 0x71, 0x88, 0x6f, 0x04, 0xa4, 0xd3, 0xf4, 0x7c, 0x37, 0x70, 0xd1, 0xcb,
+ 0xb1, 0x4a, 0x33, 0x52, 0x69, 0x1a, 0x9e, 0xd5, 0xe4, 0x2a, 0xcd, 0xe3, 0xb5, 0x6b, 0x6f, 0x29,
+ 0xa8, 0x3d, 0xb7, 0xe7, 0xae, 0x72, 0xcd, 0x83, 0x41, 0x97, 0xff, 0xe2, 0x3f, 0xf8, 0x5f, 0x02,
+ 0xf1, 0x9a, 0x7e, 0x74, 0x97, 0x36, 0x2d, 0x97, 0x9b, 0x35, 0x5d, 0x9f, 0x64, 0x58, 0xbd, 0x76,
+ 0x2b, 0x96, 0xe9, 0x1b, 0xe6, 0xa1, 0xe5, 0x10, 0xff, 0x64, 0xd5, 0x3b, 0xea, 0x31, 0x02, 0x5d,
+ 0xed, 0x93, 0xc0, 0xc8, 0xd2, 0x5a, 0x9d, 0xa4, 0xe5, 0x0f, 0x9c, 0xc0, 0xea, 0x93, 0x31, 0x85,
+ 0x3b, 0xcf, 0x52, 0xa0, 0xe6, 0x21, 0xe9, 0x1b, 0x63, 0x7a, 0x6f, 0x4f, 0xd2, 0x1b, 0x04, 0x96,
+ 0xbd, 0x6a, 0x39, 0x01, 0x0d, 0xfc, 0xb4, 0x92, 0xfe, 0x8f, 0x25, 0xb8, 0xda, 0xb2, 0x1c, 0xc3,
+ 0x3f, 0x69, 0xb1, 0x95, 0xc3, 0xe4, 0xe3, 0x01, 0xa1, 0xc1, 0x63, 0x2f, 0xb0, 0x5c, 0x87, 0xa2,
+ 0x3f, 0x82, 0x0a, 0x9b, 0x56, 0xc7, 0x08, 0x8c, 0xba, 0x76, 0x43, 0xbb, 0x59, 0x5b, 0xff, 0x46,
+ 0x53, 0x58, 0x69, 0xaa, 0x56, 0x9a, 0xde, 0x51, 0x8f, 0x11, 0x68, 0x93, 0x49, 0x37, 0x8f, 0xd7,
+ 0x9a, 0x8f, 0x0f, 0x3e, 0x22, 0x66, 0xf0, 0x90, 0x04, 0x46, 0x0b, 0x3d, 0x19, 0x36, 0x2e, 0x8c,
+ 0x86, 0x0d, 0x88, 0x69, 0x38, 0x42, 0x45, 0xaf, 0x41, 0xd9, 0xa0, 0xf7, 0x2c, 0x9b, 0xd4, 0x0b,
+ 0x37, 0xb4, 0x9b, 0xd5, 0xd6, 0xbc, 0x94, 0x2e, 0x6f, 0x70, 0x2a, 0x96, 0x5c, 0x74, 0x07, 0xe6,
+ 0x7d, 0x72, 0x6c, 0x51, 0xcb, 0x75, 0x36, 0xdd, 0x7e, 0xdf, 0x0a, 0xea, 0xc5, 0xa4, 0xbc, 0xa0,
+ 0xe2, 0x94, 0x14, 0xfa, 0x26, 0x2c, 0x84, 0x94, 0x87, 0x84, 0x52, 0xa3, 0x47, 0xea, 0x25, 0xae,
+ 0xb8, 0x20, 0x15, 0x67, 0x25, 0x19, 0xa7, 0xe5, 0x50, 0x0b, 0x50, 0x48, 0xda, 0x18, 0x04, 0x87,
+ 0xae, 0xff, 0xc8, 0xe8, 0x93, 0xfa, 0x0c, 0xd7, 0x8e, 0x26, 0x15, 0x73, 0x70, 0x86, 0x34, 0xda,
+ 0x86, 0xe5, 0x24, 0x75, 0xbb, 0x6f, 0x58, 0x76, 0xbd, 0xcc, 0x41, 0x96, 0x25, 0x48, 0x4d, 0x61,
+ 0xe1, 0x2c, 0x79, 0xf4, 0x5d, 0xb8, 0x9c, 0x9c, 0x57, 0x40, 0xc4, 0x68, 0x66, 0x39, 0xd0, 0x65,
+ 0x09, 0x74, 0x31, 0xc1, 0xc4, 0xd9, 0x3a, 0xe8, 0x11, 0xbc, 0x34, 0xc6, 0x10, 0xc3, 0xaa, 0x70,
+ 0xb4, 0x97, 0x24, 0xda, 0x7c, 0x92, 0x8b, 0x27, 0x68, 0xe9, 0xdf, 0x86, 0x25, 0xc5, 0x83, 0xda,
+ 0xee, 0xc0, 0x37, 0x89, 0xf2, 0x5d, 0xb5, 0xd3, 0xbe, 0xab, 0xfe, 0x13, 0x0d, 0x2e, 0xb7, 0xac,
+ 0xe0, 0x60, 0x60, 0x1e, 0x91, 0xe0, 0x3d, 0x72, 0xf0, 0x8e, 0xeb, 0x1e, 0x6d, 0x1a, 0x03, 0x4a,
+ 0xd0, 0xc7, 0x00, 0xa6, 0xdb, 0xef, 0xbb, 0x4e, 0xdb, 0x23, 0xa6, 0xf4, 0xbe, 0xdb, 0xcd, 0x67,
+ 0x6e, 0xfc, 0xe6, 0x26, 0x57, 0x52, 0xa1, 0x5a, 0xd7, 0xa4, 0x71, 0x34, 0xce, 0xc3, 0x8a, 0x11,
+ 0xfd, 0x67, 0x05, 0x98, 0xe1, 0x93, 0x78, 0x01, 0x8e, 0xff, 0x08, 0x4a, 0x94, 0x4d, 0xac, 0xc0,
+ 0xd1, 0xdf, 0xcc, 0x31, 0x31, 0xb1, 0xbc, 0x1e, 0x31, 0x5b, 0x73, 0x12, 0xb9, 0xc4, 0x7e, 0x61,
+ 0x8e, 0x83, 0xde, 0x85, 0x32, 0x0d, 0x8c, 0x60, 0x40, 0xf9, 0xc6, 0xa8, 0xad, 0x37, 0x73, 0x23,
+ 0x72, 0xad, 0xf8, 0x03, 0x89, 0xdf, 0x58, 0xa2, 0xe9, 0x7f, 0x53, 0x80, 0x1a, 0x97, 0xdb, 0x74,
+ 0x9d, 0xae, 0xd5, 0x7b, 0x01, 0x2b, 0xb3, 0x9f, 0x58, 0x99, 0xf5, 0xbc, 0xf3, 0x10, 0xe3, 0x9b,
+ 0xb8, 0x3e, 0x1f, 0xa6, 0xd6, 0xe7, 0xd6, 0x94, 0xb8, 0xa7, 0xaf, 0xd2, 0xaf, 0x34, 0x58, 0x50,
+ 0xa4, 0x77, 0x2d, 0x1a, 0xa0, 0x0f, 0xc7, 0x56, 0xaa, 0x99, 0x6f, 0xa5, 0x98, 0x36, 0x5f, 0xa7,
+ 0x45, 0x69, 0xad, 0x12, 0x52, 0x94, 0x55, 0x6a, 0xc3, 0x8c, 0x15, 0x90, 0x3e, 0xad, 0x17, 0x6e,
+ 0x14, 0xa7, 0xf9, 0xdc, 0x62, 0x80, 0xad, 0x8b, 0x12, 0x7a, 0x66, 0x87, 0x81, 0x60, 0x81, 0xa5,
+ 0xff, 0xa6, 0x98, 0x98, 0x06, 0x5b, 0x3e, 0x64, 0x42, 0x25, 0xf0, 0xad, 0x5e, 0x8f, 0xf8, 0xb4,
+ 0xae, 0x71, 0x5b, 0xb7, 0xf3, 0xda, 0xda, 0x17, 0x7a, 0x7b, 0xae, 0x6d, 0x99, 0x27, 0xf1, 0x6c,
+ 0x24, 0x99, 0xe2, 0x08, 0x18, 0x6d, 0x40, 0xd5, 0x1f, 0x38, 0x42, 0x50, 0x46, 0x82, 0x57, 0xa4,
+ 0x78, 0x15, 0x87, 0x8c, 0xa7, 0xc3, 0xc6, 0xbc, 0x88, 0x52, 0x21, 0x05, 0xc7, 0x5a, 0xc8, 0x48,
+ 0x9c, 0x17, 0xe2, 0x23, 0xbf, 0x95, 0xfb, 0xbc, 0xe0, 0x7e, 0x13, 0xf9, 0x65, 0x4c, 0x53, 0xcf,
+ 0x07, 0xd4, 0x81, 0xeb, 0x74, 0x60, 0x9a, 0x84, 0xd2, 0xee, 0xc0, 0xe6, 0x23, 0xa1, 0xef, 0x58,
+ 0x34, 0x70, 0xfd, 0x93, 0x5d, 0x8b, 0x85, 0x24, 0x16, 0x59, 0x66, 0x5a, 0x37, 0x46, 0xc3, 0xc6,
+ 0xf5, 0xf6, 0x29, 0x72, 0xf8, 0x54, 0x14, 0xf4, 0x3e, 0xd4, 0xbb, 0x86, 0x65, 0x93, 0x4e, 0x86,
+ 0x85, 0x19, 0x6e, 0xe1, 0xfa, 0x68, 0xd8, 0xa8, 0xdf, 0x9b, 0x20, 0x83, 0x27, 0x6a, 0xeb, 0x0f,
+ 0x60, 0x69, 0xcc, 0xa5, 0xd1, 0x6d, 0xa8, 0xd9, 0x06, 0x0d, 0xde, 0x25, 0x3e, 0x3b, 0xdb, 0xb9,
+ 0xa7, 0x16, 0xe3, 0xd0, 0xb4, 0x1b, 0xb3, 0xb0, 0x2a, 0xa7, 0xff, 0x42, 0x83, 0x2a, 0x07, 0x7b,
+ 0x01, 0xbe, 0xfe, 0x30, 0xe9, 0xeb, 0x37, 0xf3, 0xfa, 0xdf, 0x04, 0x2f, 0x07, 0xa8, 0x88, 0x91,
+ 0xbb, 0x3d, 0xfd, 0xf3, 0x92, 0xf4, 0xf8, 0x5d, 0xb7, 0x17, 0x66, 0x3d, 0xab, 0x50, 0x35, 0x5d,
+ 0x27, 0x30, 0xd8, 0x90, 0x65, 0xf8, 0x5a, 0x0a, 0x9d, 0x71, 0x33, 0x64, 0xe0, 0x58, 0x86, 0x05,
+ 0xbb, 0xae, 0x6b, 0xdb, 0xee, 0x27, 0xdc, 0x75, 0x2b, 0xf1, 0x29, 0x71, 0x8f, 0x53, 0xb1, 0xe4,
+ 0xa2, 0x37, 0xa1, 0xe2, 0xb1, 0x20, 0xea, 0xca, 0x53, 0xa8, 0x12, 0xcf, 0x7a, 0x4f, 0xd2, 0x71,
+ 0x24, 0x81, 0x6e, 0xc1, 0x1c, 0xb5, 0x1c, 0x93, 0xb4, 0x89, 0xe9, 0x3a, 0x1d, 0xca, 0xbd, 0xab,
+ 0xd8, 0x5a, 0x1c, 0x0d, 0x1b, 0x73, 0x6d, 0x85, 0x8e, 0x13, 0x52, 0xe8, 0x3d, 0xa8, 0xf2, 0xdf,
+ 0xfb, 0x96, 0x4c, 0x56, 0x6a, 0xeb, 0xbf, 0x93, 0xef, 0x53, 0x30, 0x8d, 0xd6, 0x45, 0x36, 0xc9,
+ 0x76, 0x08, 0x80, 0x63, 0x2c, 0xb4, 0x0e, 0xc0, 0xb2, 0x4f, 0x1a, 0x18, 0x7d, 0x8f, 0xf2, 0x0c,
+ 0xa6, 0x12, 0x6f, 0x98, 0xfd, 0x88, 0x83, 0x15, 0x29, 0xf4, 0x06, 0x54, 0x03, 0xc3, 0xb2, 0x77,
+ 0x2d, 0x87, 0x50, 0x9e, 0xab, 0x14, 0x85, 0x81, 0xfd, 0x90, 0x88, 0x63, 0x3e, 0x6a, 0x02, 0xd8,
+ 0xcc, 0x4d, 0x5b, 0x27, 0x01, 0xa1, 0x3c, 0x17, 0x29, 0xb6, 0xe6, 0x19, 0xf8, 0x6e, 0x44, 0xc5,
+ 0x8a, 0x04, 0x5b, 0x75, 0xc7, 0xfd, 0xc4, 0xb0, 0x82, 0x7a, 0x35, 0xb9, 0xea, 0x8f, 0xdc, 0xf7,
+ 0x0c, 0x2b, 0xc0, 0x92, 0x8b, 0x5e, 0x85, 0xd9, 0x63, 0xe9, 0xdc, 0xc0, 0x41, 0x6b, 0x2c, 0xed,
+ 0x0b, 0x9d, 0x3a, 0xe4, 0xe9, 0x3f, 0x09, 0x03, 0xdd, 0xe3, 0x41, 0xe0, 0x0d, 0x02, 0xf4, 0x6d,
+ 0x28, 0x04, 0xae, 0x74, 0xe6, 0x57, 0x94, 0x15, 0x6c, 0xb2, 0xf2, 0x20, 0x0e, 0x68, 0x98, 0x74,
+ 0x89, 0x4f, 0x1c, 0x93, 0xb4, 0xca, 0xa3, 0x61, 0xa3, 0xb0, 0xef, 0xe2, 0x42, 0xe0, 0xa2, 0xf7,
+ 0x01, 0xbc, 0x01, 0x3d, 0x6c, 0x13, 0xd3, 0x27, 0x81, 0x8c, 0x64, 0x37, 0xb3, 0x40, 0x76, 0x5d,
+ 0xd3, 0xb0, 0xd3, 0x48, 0x7c, 0xd6, 0x7b, 0x91, 0x3e, 0x56, 0xb0, 0x50, 0x07, 0x6a, 0x56, 0xdf,
+ 0xe8, 0x91, 0x5d, 0xe3, 0x80, 0xd8, 0xcc, 0x8d, 0x8a, 0x39, 0xcf, 0xb9, 0x9d, 0x48, 0x2b, 0xde,
+ 0xdd, 0x31, 0x8d, 0x62, 0x15, 0x56, 0xff, 0x13, 0x0d, 0x96, 0xf9, 0x62, 0xec, 0xb9, 0x34, 0x10,
+ 0xf9, 0x1e, 0x3f, 0x01, 0x5f, 0x85, 0x59, 0x76, 0x1e, 0x1a, 0x4e, 0x87, 0xc7, 0x82, 0xaa, 0x58,
+ 0xcb, 0x4d, 0x41, 0xc2, 0x21, 0x0f, 0x5d, 0x87, 0x92, 0xe1, 0xf7, 0xc4, 0x7e, 0xad, 0xb6, 0x2a,
+ 0x2c, 0x14, 0x6f, 0xf8, 0x3d, 0x8a, 0x39, 0x95, 0x7d, 0x38, 0x6a, 0xfa, 0x96, 0x37, 0x96, 0xc3,
+ 0xb7, 0x39, 0x15, 0x4b, 0xae, 0xfe, 0xab, 0x59, 0x98, 0x53, 0xab, 0x92, 0x17, 0x90, 0x7b, 0x7c,
+ 0x00, 0x95, 0x30, 0xcb, 0x95, 0x5f, 0x6d, 0x2d, 0xc7, 0xd2, 0x8a, 0x9c, 0x17, 0x4b, 0xc5, 0xd6,
+ 0x1c, 0xdb, 0xd0, 0xe1, 0x2f, 0x1c, 0x01, 0x22, 0x02, 0x8b, 0x32, 0xe0, 0x91, 0x4e, 0xeb, 0x84,
+ 0xaf, 0xbd, 0x8c, 0x53, 0xb9, 0xfc, 0xeb, 0xd2, 0x68, 0xd8, 0x58, 0xdc, 0x4f, 0x01, 0xe0, 0x31,
+ 0x48, 0xb4, 0x01, 0xa5, 0xae, 0xef, 0xf6, 0xf9, 0x79, 0x91, 0x13, 0x9a, 0x7f, 0xa1, 0x7b, 0xbe,
+ 0xdb, 0xc7, 0x5c, 0x15, 0xbd, 0x0f, 0xe5, 0x03, 0x9e, 0xd2, 0xcb, 0x13, 0x24, 0x57, 0xb2, 0x94,
+ 0xae, 0x01, 0x5a, 0xc0, 0xbe, 0xa9, 0x20, 0x63, 0x89, 0x87, 0xd6, 0x92, 0xd1, 0xa6, 0xcc, 0x37,
+ 0xe4, 0xc2, 0x69, 0x91, 0x06, 0x7d, 0x13, 0x8a, 0xc4, 0x39, 0xae, 0xcf, 0x72, 0x4f, 0xbf, 0x96,
+ 0x35, 0x9d, 0x6d, 0xe7, 0xf8, 0x5d, 0xc3, 0x6f, 0xd5, 0xe4, 0xa7, 0x2d, 0x6e, 0x3b, 0xc7, 0x98,
+ 0xe9, 0xa0, 0x23, 0xa8, 0x29, 0xcb, 0x53, 0xaf, 0x70, 0x88, 0x5b, 0x53, 0xa6, 0x2f, 0xa2, 0x86,
+ 0x88, 0xf6, 0x8c, 0xf2, 0x05, 0xb0, 0x8a, 0x8e, 0x7e, 0xac, 0xc1, 0xe5, 0x8e, 0x6b, 0x1e, 0x11,
+ 0xbf, 0x1d, 0xb0, 0x12, 0xbb, 0x77, 0x22, 0x03, 0x0a, 0x3f, 0x9f, 0x6a, 0xeb, 0x77, 0x73, 0xd8,
+ 0xdd, 0xca, 0xd2, 0x6f, 0x5d, 0x1d, 0x0d, 0x1b, 0x97, 0x33, 0x59, 0x38, 0xdb, 0x22, 0x1f, 0x0b,
+ 0xe5, 0x5f, 0x21, 0x3d, 0x16, 0xc8, 0x3d, 0x96, 0x76, 0x96, 0xbe, 0x18, 0x4b, 0x26, 0x0b, 0x67,
+ 0x5b, 0xd4, 0xff, 0x6d, 0x46, 0x1e, 0xac, 0xb2, 0x34, 0x7c, 0x1b, 0x4a, 0xc1, 0x89, 0x17, 0x16,
+ 0x86, 0x8d, 0x30, 0x57, 0xdf, 0x3f, 0xf1, 0xc8, 0xd3, 0x61, 0x63, 0x41, 0x11, 0x65, 0x24, 0xcc,
+ 0x85, 0x15, 0x8f, 0x2c, 0x3c, 0x67, 0x8f, 0x6c, 0x02, 0x88, 0x35, 0xec, 0xb2, 0x6a, 0x55, 0x9e,
+ 0x48, 0xec, 0x80, 0xd8, 0x8a, 0xa8, 0x58, 0x91, 0x40, 0xbb, 0x50, 0xec, 0xc9, 0x5c, 0x2f, 0xdf,
+ 0xe9, 0x70, 0xdf, 0x0a, 0xd4, 0x31, 0xcc, 0x32, 0x0f, 0xbd, 0x6f, 0x05, 0x98, 0xc1, 0xb0, 0xb2,
+ 0x8d, 0x9f, 0xbb, 0xb4, 0x3e, 0x93, 0x3b, 0x8f, 0xe7, 0xdb, 0x5c, 0xa2, 0x45, 0x67, 0x27, 0x27,
+ 0x52, 0x2c, 0xd1, 0x58, 0xb4, 0x66, 0xf9, 0x09, 0xf9, 0x34, 0xd8, 0xb2, 0x7c, 0xd9, 0x6f, 0x50,
+ 0xd2, 0xdb, 0x90, 0x83, 0x15, 0x29, 0xf4, 0x03, 0x98, 0x93, 0x5f, 0x50, 0x84, 0xad, 0xd9, 0x29,
+ 0xc3, 0x96, 0x48, 0x4d, 0x14, 0x04, 0x9c, 0xc0, 0x43, 0x7f, 0x08, 0xb3, 0x94, 0xff, 0x45, 0xa7,
+ 0xd8, 0x89, 0x42, 0x57, 0x5d, 0xc0, 0xa8, 0x73, 0x23, 0x58, 0x14, 0x87, 0xa8, 0xe8, 0x88, 0x4f,
+ 0xba, 0x6b, 0xf5, 0x1e, 0x1a, 0x1e, 0xdb, 0x75, 0xcc, 0xc6, 0xef, 0xe6, 0x2a, 0x01, 0xa4, 0x92,
+ 0x6a, 0x46, 0x5d, 0x2d, 0x09, 0x89, 0x15, 0x78, 0xfd, 0xdf, 0xc3, 0x04, 0x98, 0x07, 0x46, 0x23,
+ 0xa3, 0x5b, 0xf1, 0x9c, 0xab, 0x8f, 0xd4, 0x61, 0x56, 0xf8, 0x6d, 0x1e, 0x66, 0xfa, 0xe7, 0xb3,
+ 0xe1, 0xa6, 0x15, 0x55, 0xc2, 0x1a, 0xcc, 0x78, 0x87, 0x06, 0x0d, 0x77, 0xed, 0xd7, 0xc2, 0xc4,
+ 0x7a, 0x8f, 0x11, 0x9f, 0x0e, 0x1b, 0x20, 0xb2, 0x05, 0xf6, 0x0b, 0x0b, 0x49, 0x9e, 0x46, 0x1b,
+ 0x8e, 0x49, 0x6c, 0x9b, 0x74, 0x64, 0x62, 0x1c, 0xa7, 0xd1, 0x21, 0x03, 0xc7, 0x32, 0xe8, 0x0e,
+ 0x94, 0x7d, 0x62, 0x50, 0xd7, 0x91, 0xbb, 0x70, 0x25, 0xf4, 0x6d, 0xcc, 0xa9, 0x4f, 0x99, 0x77,
+ 0x89, 0xb2, 0x9b, 0xff, 0xc6, 0x52, 0x1a, 0xbd, 0x0e, 0xb3, 0xfd, 0xd3, 0x7b, 0x7b, 0x21, 0x1f,
+ 0x75, 0x61, 0x9e, 0x06, 0x86, 0x1f, 0x44, 0xf9, 0xea, 0x19, 0x52, 0x64, 0x34, 0x1a, 0x36, 0xe6,
+ 0xdb, 0x09, 0x14, 0x9c, 0x42, 0x45, 0x03, 0x58, 0x36, 0xdd, 0xbe, 0x67, 0x13, 0x76, 0x04, 0xc6,
+ 0xc6, 0xca, 0x53, 0x1b, 0xbb, 0x32, 0x1a, 0x36, 0x96, 0x37, 0xc7, 0xa1, 0x70, 0x16, 0x3e, 0xfa,
+ 0x3d, 0xa8, 0x74, 0x06, 0xbe, 0xc1, 0x88, 0x32, 0xdd, 0x7e, 0x39, 0x2c, 0x30, 0xb6, 0x24, 0xfd,
+ 0xe9, 0xb0, 0x71, 0x91, 0x65, 0xe8, 0xcd, 0x90, 0x80, 0x23, 0x15, 0x74, 0x00, 0xd7, 0x5c, 0x9e,
+ 0xfc, 0x8a, 0xa3, 0x4f, 0x24, 0x18, 0xe1, 0xf6, 0x96, 0xdd, 0x41, 0x5d, 0x02, 0x5e, 0x7b, 0x3c,
+ 0x51, 0x12, 0x9f, 0x82, 0x82, 0xee, 0x43, 0x59, 0x6c, 0x22, 0x19, 0x15, 0x73, 0xe5, 0x27, 0x20,
+ 0x3a, 0xbc, 0x4c, 0x0d, 0x4b, 0x75, 0xf4, 0x21, 0x94, 0x85, 0x19, 0x19, 0xd2, 0x6e, 0x4d, 0xd7,
+ 0xf0, 0x12, 0xc3, 0x8f, 0xcf, 0x4f, 0xf1, 0x1b, 0x4b, 0x4c, 0xb4, 0xcf, 0xdb, 0x45, 0xec, 0x5c,
+ 0xae, 0xf1, 0x7d, 0x96, 0xa7, 0x41, 0xd7, 0x66, 0x0a, 0x3b, 0x4e, 0xd7, 0x4d, 0xb4, 0x89, 0xf8,
+ 0xa9, 0x2c, 0xb0, 0xd8, 0xa9, 0x6c, 0xbb, 0xbd, 0xb6, 0x63, 0x79, 0x1e, 0x09, 0xea, 0x73, 0xc9,
+ 0x53, 0x79, 0x37, 0xe2, 0x60, 0x45, 0x4a, 0x37, 0x65, 0xd1, 0xae, 0x0e, 0x1b, 0x3d, 0x52, 0x8a,
+ 0x93, 0x3b, 0x67, 0x99, 0xf8, 0xbe, 0xab, 0xd6, 0x2b, 0xfa, 0xae, 0x4c, 0xf7, 0x93, 0x22, 0xe8,
+ 0xb6, 0x2c, 0x36, 0xb6, 0xac, 0x1e, 0xa1, 0x81, 0xdc, 0xfb, 0xc9, 0xea, 0x41, 0xb0, 0xb0, 0x2a,
+ 0xa7, 0xff, 0xb2, 0x04, 0x17, 0x25, 0x9c, 0x48, 0x05, 0xd0, 0xed, 0x44, 0xcc, 0x7f, 0x39, 0x15,
+ 0xf3, 0x97, 0x12, 0xc2, 0x4a, 0xd4, 0xf7, 0x61, 0x3e, 0x99, 0xdf, 0xc8, 0xe8, 0x7f, 0x27, 0x77,
+ 0x2a, 0x95, 0x40, 0x16, 0x5b, 0x37, 0x99, 0x48, 0xe1, 0x94, 0x05, 0x66, 0x33, 0x99, 0xc7, 0xc8,
+ 0x1c, 0xfd, 0x4e, 0xee, 0x94, 0x29, 0xc3, 0x66, 0x32, 0x61, 0xc2, 0x29, 0x0b, 0xcc, 0xa6, 0x39,
+ 0xa0, 0x81, 0xdb, 0x8f, 0x6c, 0x96, 0x72, 0xdb, 0xdc, 0xe4, 0x8a, 0x19, 0x36, 0x37, 0x13, 0x88,
+ 0x38, 0x65, 0x01, 0xfd, 0x5c, 0x83, 0x2b, 0x1f, 0x11, 0xe7, 0xc8, 0x72, 0xe8, 0x9e, 0xe5, 0x11,
+ 0xdb, 0x72, 0xe2, 0x19, 0x8b, 0x43, 0xf1, 0xf7, 0x73, 0x58, 0x7f, 0x90, 0x44, 0x48, 0x0e, 0xe3,
+ 0x6b, 0xa3, 0x61, 0xe3, 0xca, 0x83, 0x6c, 0x1b, 0x78, 0x92, 0x71, 0xfd, 0x47, 0x33, 0xd2, 0xe3,
+ 0xd5, 0x90, 0xa5, 0x1e, 0xf2, 0xda, 0x33, 0x0e, 0x79, 0x1f, 0xe6, 0xf9, 0x35, 0x97, 0x65, 0xca,
+ 0x4e, 0xff, 0x14, 0x5e, 0x73, 0x3f, 0xa1, 0x28, 0xa2, 0x25, 0x5f, 0xcd, 0x24, 0x03, 0xa7, 0x2c,
+ 0x20, 0x07, 0x2e, 0x0a, 0xf0, 0xd0, 0x64, 0x31, 0xf7, 0x85, 0xc5, 0x7d, 0x2b, 0x78, 0x27, 0xd2,
+ 0x13, 0x16, 0x97, 0x46, 0xc3, 0xc6, 0xc5, 0x04, 0x1d, 0x27, 0xe1, 0xd1, 0x00, 0x16, 0xf9, 0x8e,
+ 0xdb, 0x3c, 0x34, 0x9c, 0x9e, 0x58, 0x76, 0xe9, 0x33, 0x6f, 0xe7, 0xcd, 0x20, 0x85, 0xaa, 0x30,
+ 0xc8, 0x6b, 0xcb, 0x9d, 0x14, 0x20, 0x1e, 0x33, 0x21, 0xa7, 0x69, 0x1b, 0xd1, 0x34, 0x67, 0xa6,
+ 0x99, 0xe6, 0xae, 0x91, 0x3d, 0xcd, 0x98, 0x8e, 0x93, 0xf0, 0xe8, 0x87, 0xb0, 0x78, 0x90, 0xba,
+ 0x1d, 0x92, 0x41, 0xf4, 0x6e, 0xae, 0x02, 0x20, 0xe3, 0x62, 0x49, 0xcc, 0x35, 0xcd, 0xc2, 0x63,
+ 0x76, 0xf4, 0x5f, 0x94, 0x00, 0x8d, 0xb7, 0xb1, 0xd1, 0xad, 0xc4, 0x51, 0x76, 0x23, 0x75, 0x94,
+ 0x2d, 0xaa, 0x1a, 0xca, 0x49, 0xf6, 0x21, 0x94, 0xc5, 0x78, 0xa7, 0x68, 0x2b, 0xc8, 0x81, 0x48,
+ 0xb0, 0x2c, 0xa7, 0x90, 0x98, 0x2c, 0xb3, 0x96, 0xfe, 0x28, 0xfd, 0xee, 0x0c, 0xf0, 0x59, 0x5e,
+ 0x1e, 0xa2, 0xa2, 0x43, 0x19, 0x08, 0x84, 0x2f, 0x48, 0x4f, 0xbb, 0x3d, 0x9d, 0xa7, 0x85, 0x86,
+ 0x16, 0xa2, 0xd8, 0x21, 0xe8, 0x58, 0x85, 0x96, 0x0b, 0x65, 0x1b, 0x07, 0xd2, 0xb5, 0xce, 0xb1,
+ 0x50, 0x8a, 0x5b, 0x49, 0x4c, 0x44, 0xa0, 0x1a, 0x7d, 0x67, 0xe9, 0x48, 0x67, 0x30, 0x90, 0xed,
+ 0x41, 0x31, 0xb2, 0xfe, 0xcf, 0x65, 0x50, 0xb2, 0x78, 0xf4, 0x1d, 0x98, 0xa7, 0xc4, 0x3f, 0xb6,
+ 0x4c, 0xb2, 0x61, 0x9a, 0xee, 0xc0, 0x09, 0x23, 0x69, 0x74, 0xd3, 0xda, 0x4e, 0x70, 0x71, 0x4a,
+ 0x9a, 0xdf, 0xed, 0xf1, 0x80, 0x21, 0x9d, 0x27, 0xff, 0xdd, 0x5e, 0xaa, 0x48, 0x94, 0x8d, 0x2a,
+ 0x89, 0x96, 0xe8, 0x76, 0x15, 0x9f, 0x77, 0xb7, 0xeb, 0x07, 0x50, 0xa1, 0xc9, 0x68, 0xf6, 0x8d,
+ 0xfc, 0x89, 0x8a, 0x0c, 0x20, 0x51, 0x7b, 0x3c, 0x8a, 0x1a, 0x11, 0x26, 0x5b, 0x14, 0x99, 0xff,
+ 0xcd, 0x4c, 0xb7, 0x28, 0xcf, 0xc8, 0xfc, 0xfe, 0x00, 0xaa, 0x3e, 0x11, 0x0b, 0x44, 0xa5, 0x8b,
+ 0x64, 0x96, 0xc0, 0x58, 0x0a, 0x61, 0xf2, 0xf1, 0xc0, 0xf2, 0x49, 0x9f, 0x38, 0x01, 0x8d, 0x0b,
+ 0x9c, 0x90, 0x4b, 0x71, 0x8c, 0x86, 0x3e, 0x02, 0xf0, 0xa2, 0x7e, 0xaa, 0x2c, 0xaf, 0x73, 0x67,
+ 0x6f, 0xc9, 0x4e, 0x6c, 0x9c, 0x36, 0xc6, 0x74, 0xac, 0xa0, 0xa3, 0x0f, 0xe0, 0x6a, 0x5c, 0x21,
+ 0x6c, 0x11, 0xa3, 0xc3, 0x63, 0xac, 0xbc, 0x4a, 0x10, 0xcd, 0xf5, 0xaf, 0x8f, 0x86, 0x8d, 0xab,
+ 0x9b, 0x93, 0x84, 0xf0, 0x64, 0x7d, 0xd4, 0x87, 0x39, 0xc7, 0xed, 0x90, 0x36, 0xb1, 0x89, 0x19,
+ 0xb8, 0xbe, 0x4c, 0xe5, 0xf3, 0x94, 0xda, 0xa2, 0x29, 0x64, 0xd8, 0x8f, 0x14, 0x75, 0xd1, 0x38,
+ 0x50, 0x29, 0x38, 0x01, 0xaf, 0xff, 0xb9, 0x06, 0x19, 0x57, 0xf7, 0x09, 0xf7, 0xd5, 0x9e, 0xb7,
+ 0xfb, 0xbe, 0x06, 0x65, 0x1a, 0x77, 0xef, 0xd5, 0x26, 0xb5, 0x68, 0x6d, 0x48, 0xae, 0xfe, 0xf7,
+ 0x1a, 0x5c, 0xca, 0xea, 0x1f, 0x30, 0x3f, 0x8a, 0xba, 0x05, 0x72, 0x78, 0xf9, 0x5b, 0x29, 0xea,
+ 0x7d, 0x93, 0x80, 0xc0, 0x31, 0x1a, 0x3b, 0x4f, 0x3a, 0x84, 0x06, 0x96, 0xc3, 0xcb, 0xb6, 0x2d,
+ 0xcb, 0x97, 0x63, 0x8c, 0xce, 0x93, 0xad, 0x04, 0x17, 0xa7, 0xa4, 0xf5, 0x9f, 0x96, 0x60, 0x39,
+ 0x23, 0x6d, 0x44, 0xdb, 0xb2, 0x73, 0x3c, 0xc5, 0xa5, 0x47, 0x74, 0xd5, 0x9e, 0xe8, 0x1e, 0x83,
+ 0x37, 0xb0, 0xed, 0xf3, 0x5d, 0x7e, 0x84, 0xfa, 0x58, 0xc1, 0x0a, 0x5b, 0xc1, 0xc5, 0x33, 0xb4,
+ 0x82, 0x1f, 0x00, 0x22, 0x9f, 0x7a, 0x2e, 0x25, 0x32, 0xfd, 0x77, 0x79, 0x08, 0x28, 0xf1, 0xb6,
+ 0x44, 0xf4, 0x3e, 0x64, 0x7b, 0x4c, 0x02, 0x67, 0x68, 0xa1, 0x55, 0xa8, 0x76, 0x5d, 0xdf, 0x24,
+ 0x6c, 0x94, 0xfc, 0xf4, 0x51, 0x3a, 0x1b, 0xf7, 0x42, 0x06, 0x8e, 0x65, 0xd0, 0xfb, 0x71, 0xe7,
+ 0xab, 0x9c, 0xfb, 0xc2, 0x46, 0xcc, 0x99, 0x6f, 0xf6, 0xc9, 0x2d, 0xaf, 0x0d, 0x58, 0xe0, 0x0a,
+ 0x1b, 0x7b, 0x3b, 0x61, 0x4f, 0x5d, 0xbc, 0x09, 0xba, 0x22, 0x55, 0x44, 0x3f, 0x35, 0x66, 0xe3,
+ 0xb4, 0xbc, 0xfe, 0xa4, 0x04, 0xcb, 0x19, 0xc5, 0x52, 0x74, 0x8f, 0xa0, 0x9d, 0xe7, 0x1e, 0xe1,
+ 0xb7, 0xe5, 0x09, 0xaf, 0xc3, 0xac, 0xe3, 0x6e, 0x1a, 0xe6, 0x21, 0x91, 0x37, 0xa9, 0xd1, 0x12,
+ 0x3d, 0x12, 0x64, 0x1c, 0xf2, 0x43, 0xa7, 0x29, 0x9d, 0xc1, 0x69, 0xa6, 0xfe, 0xd0, 0xdf, 0x09,
+ 0x0b, 0xd6, 0xae, 0x65, 0x93, 0x3d, 0x23, 0x38, 0x94, 0xad, 0xd7, 0x78, 0x67, 0x26, 0xb8, 0x38,
+ 0x25, 0x8d, 0xbe, 0x0b, 0x55, 0xf1, 0x79, 0xfc, 0x1e, 0xcd, 0x71, 0xe3, 0x11, 0x0d, 0xa6, 0x15,
+ 0x2a, 0xe1, 0x58, 0x1f, 0x79, 0x70, 0x85, 0x67, 0x56, 0xec, 0xcc, 0xed, 0x5b, 0x3f, 0xe4, 0xdb,
+ 0x5f, 0x3e, 0xb1, 0x10, 0xbd, 0x9c, 0x3b, 0xac, 0x3e, 0xdb, 0xc9, 0x16, 0x79, 0x3a, 0x99, 0x85,
+ 0x27, 0xc1, 0xea, 0x3f, 0xd5, 0x20, 0xfb, 0x9e, 0x22, 0x39, 0x31, 0xed, 0x9c, 0x13, 0x7b, 0x35,
+ 0xfe, 0xf8, 0xa2, 0xaf, 0x58, 0xcb, 0xfa, 0xf0, 0xfa, 0x5f, 0x68, 0xb0, 0x9c, 0x51, 0xcf, 0x7d,
+ 0x35, 0xe2, 0xc6, 0x17, 0x85, 0xf4, 0xe0, 0xb6, 0x8f, 0x89, 0x13, 0x9c, 0xed, 0x76, 0x64, 0x5b,
+ 0xdc, 0x49, 0x14, 0x64, 0x7b, 0x31, 0x57, 0x31, 0xc6, 0x1b, 0x55, 0xc9, 0xcb, 0x88, 0x73, 0x1c,
+ 0xaf, 0x93, 0x2f, 0xbf, 0x4a, 0x2f, 0xfa, 0xf2, 0x4b, 0xff, 0x07, 0x0d, 0xe6, 0x93, 0x97, 0x2e,
+ 0xe8, 0xeb, 0x50, 0x1c, 0xf8, 0x96, 0x5c, 0xd4, 0x68, 0xf4, 0xdf, 0xc3, 0x3b, 0x98, 0xd1, 0x19,
+ 0xdb, 0x27, 0x5d, 0xf9, 0xc5, 0x22, 0x36, 0x26, 0x5d, 0xcc, 0xe8, 0x88, 0x40, 0xcd, 0xf3, 0xdd,
+ 0x4f, 0x4f, 0x44, 0x30, 0x9e, 0xe2, 0x81, 0xdd, 0x5e, 0xac, 0x15, 0xb7, 0xcd, 0x14, 0x22, 0x56,
+ 0x71, 0x79, 0x9a, 0x33, 0xde, 0x0c, 0xf8, 0x6a, 0xb8, 0xeb, 0x7f, 0x6b, 0x30, 0x2b, 0x9d, 0x06,
+ 0x7d, 0x0c, 0xf3, 0xbd, 0xc4, 0xf2, 0x4e, 0x31, 0xac, 0xd4, 0x65, 0x58, 0x74, 0x2e, 0x26, 0xe9,
+ 0x38, 0x65, 0x00, 0xfd, 0x31, 0x2c, 0xf5, 0xac, 0x20, 0x39, 0xa7, 0x29, 0x6e, 0x02, 0xef, 0xa7,
+ 0x75, 0x5b, 0x57, 0xa5, 0xe1, 0xa5, 0x31, 0x16, 0x1e, 0xb7, 0xa4, 0x7f, 0x2e, 0xbe, 0x4c, 0xaa,
+ 0x7f, 0xf1, 0xbf, 0xf1, 0x44, 0xf5, 0xef, 0x0a, 0x30, 0x3e, 0x64, 0xf6, 0x15, 0x4d, 0x51, 0x54,
+ 0x68, 0x99, 0xaf, 0xa2, 0x25, 0x97, 0x15, 0xd7, 0x06, 0x7f, 0x56, 0x3c, 0xc5, 0xda, 0x09, 0x53,
+ 0x9b, 0xae, 0x13, 0xf8, 0xae, 0xfd, 0x3d, 0x4a, 0x7c, 0xe5, 0x2d, 0x2f, 0xc7, 0xc2, 0x12, 0x93,
+ 0x15, 0xd7, 0x66, 0xf8, 0x34, 0x78, 0x8a, 0x57, 0x96, 0xe3, 0x06, 0x94, 0xec, 0x57, 0xc2, 0xe1,
+ 0x18, 0x79, 0x8a, 0xeb, 0x1e, 0xfd, 0xc7, 0x1a, 0x2c, 0xa6, 0x7b, 0x5d, 0x4c, 0x9f, 0xc7, 0xaf,
+ 0x9d, 0xad, 0x74, 0x27, 0x71, 0x47, 0x90, 0x71, 0xc8, 0x47, 0x0f, 0x60, 0x96, 0xe5, 0x31, 0x58,
+ 0x9e, 0x0d, 0x39, 0xb3, 0x20, 0x1e, 0x8d, 0xee, 0x09, 0x3d, 0x1c, 0x02, 0xe8, 0xff, 0xa2, 0x01,
+ 0x1a, 0xef, 0x86, 0xa0, 0x3d, 0xb8, 0x64, 0x1b, 0x34, 0x88, 0x2e, 0xe2, 0x76, 0x12, 0x43, 0xbb,
+ 0x2e, 0x87, 0x76, 0x69, 0x37, 0x43, 0x06, 0x67, 0x6a, 0x46, 0x79, 0x5b, 0xe1, 0xec, 0x79, 0xdb,
+ 0x6b, 0x50, 0xf6, 0xd8, 0x5a, 0x75, 0x64, 0x72, 0x15, 0x7d, 0xf1, 0x3d, 0x4e, 0xc5, 0x92, 0xab,
+ 0xb7, 0x01, 0xe2, 0x27, 0x44, 0xe8, 0x06, 0x94, 0x1c, 0xa3, 0x1f, 0x86, 0xae, 0xa8, 0x32, 0xe0,
+ 0x6f, 0xd1, 0x39, 0x07, 0xbd, 0x02, 0x33, 0xc7, 0x86, 0x3d, 0x08, 0x1f, 0xfb, 0x47, 0xcf, 0xf3,
+ 0xde, 0x65, 0x44, 0x2c, 0x78, 0xfa, 0x5f, 0x16, 0xa0, 0xa6, 0x5c, 0x71, 0x3f, 0xaf, 0xaa, 0xe4,
+ 0x3d, 0x98, 0xf1, 0x8c, 0xe0, 0x30, 0x7c, 0x44, 0xb8, 0x3e, 0xdd, 0x45, 0x3b, 0xcb, 0xce, 0xe2,
+ 0xf1, 0xb2, 0x5f, 0x14, 0x0b, 0xbc, 0x54, 0x92, 0x5b, 0x7c, 0x8e, 0x49, 0xee, 0x4b, 0x50, 0x30,
+ 0x28, 0x4f, 0x5c, 0xab, 0xe2, 0xb6, 0x66, 0x83, 0xe2, 0x82, 0x41, 0xf5, 0x1f, 0x69, 0xb0, 0x90,
+ 0x1a, 0x1b, 0x5a, 0x07, 0xa0, 0xd1, 0x2f, 0xf9, 0x09, 0xa2, 0x1e, 0x41, 0x2c, 0x87, 0x15, 0xa9,
+ 0x73, 0xd7, 0x91, 0xff, 0xaa, 0xc1, 0xf5, 0xd3, 0x2e, 0x00, 0x58, 0x75, 0x22, 0xbb, 0xfc, 0x51,
+ 0x3e, 0xac, 0x25, 0xab, 0x93, 0x07, 0x49, 0x36, 0x4e, 0xcb, 0xa3, 0xdb, 0x50, 0x53, 0x48, 0x72,
+ 0x80, 0x51, 0x2c, 0x55, 0xd4, 0xb1, 0x2a, 0x77, 0x8e, 0x54, 0x46, 0xff, 0x27, 0x0d, 0x2e, 0x65,
+ 0xb5, 0x29, 0x50, 0x2f, 0x7c, 0x86, 0x2a, 0xf2, 0xd7, 0xd6, 0x19, 0xdb, 0x1d, 0x4d, 0xfe, 0x18,
+ 0x75, 0xdb, 0x09, 0xfc, 0x93, 0xec, 0x07, 0xaa, 0xd7, 0xee, 0x02, 0xc4, 0x32, 0x68, 0x11, 0x8a,
+ 0x47, 0xe4, 0x44, 0x2c, 0x1c, 0x66, 0x7f, 0xa2, 0x4b, 0x89, 0x6d, 0x24, 0xf7, 0xcd, 0xb7, 0x0a,
+ 0x77, 0xb5, 0x6f, 0x55, 0xfe, 0xec, 0xaf, 0x1a, 0x17, 0x3e, 0xfb, 0xcd, 0x8d, 0x0b, 0xfa, 0xcf,
+ 0x34, 0x50, 0x33, 0x0d, 0xf4, 0x06, 0x54, 0x0f, 0x83, 0xc0, 0xe3, 0x24, 0x79, 0xbf, 0xce, 0x9f,
+ 0x62, 0xbe, 0xb3, 0xbf, 0xbf, 0xc7, 0x89, 0x38, 0xe6, 0xa3, 0x26, 0x00, 0xfb, 0x41, 0x85, 0x74,
+ 0x29, 0x7e, 0x13, 0xc3, 0xa4, 0xdb, 0x42, 0x5c, 0x91, 0x10, 0x09, 0xb9, 0x10, 0x16, 0xff, 0x1f,
+ 0x23, 0x13, 0x72, 0x21, 0x19, 0xf2, 0xf4, 0xbf, 0xd5, 0x60, 0x69, 0xec, 0x3d, 0x07, 0xda, 0x8b,
+ 0x52, 0x90, 0x69, 0xbb, 0x24, 0x13, 0x92, 0x95, 0x73, 0xfb, 0xf5, 0x5d, 0xb8, 0x24, 0x10, 0xb9,
+ 0xd5, 0xf8, 0xee, 0xfa, 0x99, 0x07, 0x9c, 0xfe, 0xd7, 0x1a, 0x40, 0x5c, 0xb7, 0xa3, 0x03, 0x98,
+ 0x13, 0x43, 0x4a, 0xe4, 0x49, 0xf9, 0x27, 0x78, 0x49, 0x9a, 0x98, 0x6b, 0x2b, 0x28, 0x38, 0x81,
+ 0xc9, 0x6a, 0xd4, 0xbe, 0x3b, 0x70, 0x02, 0xbe, 0xbb, 0x0a, 0xc9, 0xd7, 0xca, 0x0f, 0x43, 0x06,
+ 0x8e, 0x65, 0xf4, 0xcf, 0x8a, 0xb0, 0x9c, 0x71, 0x51, 0xf9, 0xff, 0xba, 0xfb, 0xf3, 0x3a, 0xcc,
+ 0x8a, 0x47, 0xa5, 0x34, 0x9d, 0x33, 0x88, 0x37, 0xa7, 0x14, 0x87, 0x7c, 0xb4, 0x06, 0x35, 0xcb,
+ 0x31, 0x45, 0x43, 0xd7, 0x08, 0xab, 0x7e, 0x71, 0x67, 0x11, 0x93, 0xb1, 0x2a, 0x93, 0x6c, 0x13,
+ 0x94, 0x9f, 0xdd, 0x26, 0xd0, 0xbf, 0x0f, 0x4b, 0x63, 0x59, 0x4f, 0xbe, 0xf0, 0x49, 0xf8, 0x3f,
+ 0x6a, 0xa5, 0xc2, 0xa7, 0xf8, 0xff, 0x2c, 0xc1, 0xd3, 0x7f, 0xae, 0xc1, 0x7c, 0x2a, 0x3d, 0x3c,
+ 0x53, 0x4d, 0xf9, 0x58, 0xad, 0x29, 0xcf, 0x96, 0x64, 0x27, 0xaa, 0x4b, 0xfd, 0x01, 0x64, 0xbf,
+ 0x1b, 0x4c, 0xaf, 0xb8, 0xf6, 0xec, 0x15, 0xd7, 0x7f, 0x59, 0x80, 0x6a, 0xf4, 0xdc, 0x02, 0xbd,
+ 0x95, 0x58, 0xb9, 0xab, 0xea, 0xca, 0x3d, 0x1d, 0x36, 0x84, 0xa0, 0xb2, 0x8c, 0x1f, 0x40, 0x35,
+ 0x7a, 0xae, 0x13, 0xd5, 0xcc, 0xf9, 0x9f, 0xe4, 0x44, 0x9f, 0x36, 0x7a, 0x03, 0x84, 0x63, 0x3c,
+ 0x96, 0xcf, 0x85, 0xef, 0x69, 0x1e, 0x5a, 0xb6, 0x6d, 0x51, 0xd9, 0x72, 0x2f, 0xf2, 0x96, 0x7b,
+ 0x94, 0xcf, 0x6d, 0x65, 0xc8, 0xe0, 0x4c, 0x4d, 0xb4, 0x07, 0x33, 0x34, 0x20, 0x1e, 0x95, 0x1d,
+ 0xac, 0x37, 0x72, 0xbd, 0x44, 0x21, 0x1e, 0xaf, 0xef, 0x23, 0x17, 0x61, 0x14, 0x8a, 0x05, 0x90,
+ 0xfe, 0x5f, 0x1a, 0x54, 0x42, 0x11, 0xf4, 0x66, 0x62, 0xf1, 0xea, 0xa9, 0xc5, 0xe3, 0x72, 0xff,
+ 0x67, 0xd7, 0x4e, 0x1f, 0x6a, 0x30, 0x9f, 0xbc, 0xbc, 0x53, 0x2a, 0x5e, 0xed, 0xb4, 0x8a, 0x17,
+ 0xbd, 0x09, 0x15, 0xc3, 0xb6, 0xdd, 0x4f, 0xb6, 0x9d, 0x63, 0xd9, 0x65, 0x8a, 0x6e, 0xa3, 0x36,
+ 0x24, 0x1d, 0x47, 0x12, 0xe8, 0x18, 0x16, 0x84, 0x5e, 0xfc, 0x5e, 0xaa, 0x98, 0xfb, 0x52, 0x24,
+ 0x2b, 0xd8, 0xb4, 0x96, 0x59, 0x7a, 0xd4, 0x4e, 0x62, 0xe2, 0xb4, 0x91, 0xd6, 0xcd, 0x27, 0x5f,
+ 0xae, 0x5c, 0xf8, 0xf5, 0x97, 0x2b, 0x17, 0xbe, 0xf8, 0x72, 0xe5, 0xc2, 0x67, 0xa3, 0x15, 0xed,
+ 0xc9, 0x68, 0x45, 0xfb, 0xf5, 0x68, 0x45, 0xfb, 0x62, 0xb4, 0xa2, 0xfd, 0xc7, 0x68, 0x45, 0xfb,
+ 0xd3, 0xff, 0x5c, 0xb9, 0xf0, 0xfd, 0xc2, 0xf1, 0xda, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0x3d,
+ 0xc4, 0x8b, 0x1e, 0x67, 0x3d, 0x00, 0x00,
}
diff --git a/vendor/github.com/openshift/api/build/v1/generated.proto b/vendor/github.com/openshift/api/build/v1/generated.proto
index 2f6b987b077a..e4b3b50e961a 100644
--- a/vendor/github.com/openshift/api/build/v1/generated.proto
+++ b/vendor/github.com/openshift/api/build/v1/generated.proto
@@ -780,6 +780,9 @@ message ImageChangeTrigger {
// will be used. Only one ImageChangeTrigger with an empty From reference is allowed in
// a build configuration.
optional k8s.io.api.core.v1.ObjectReference from = 2;
+
+ // paused is true if this trigger is temporarily disabled. Optional.
+ optional bool paused = 3;
}
// ImageLabel represents a label applied to the resulting image.
diff --git a/vendor/github.com/openshift/api/build/v1/types.go b/vendor/github.com/openshift/api/build/v1/types.go
index 7b1fe2a70aea..a33d9307cce1 100644
--- a/vendor/github.com/openshift/api/build/v1/types.go
+++ b/vendor/github.com/openshift/api/build/v1/types.go
@@ -968,6 +968,9 @@ type ImageChangeTrigger struct {
// will be used. Only one ImageChangeTrigger with an empty From reference is allowed in
// a build configuration.
From *corev1.ObjectReference `json:"from,omitempty" protobuf:"bytes,2,opt,name=from"`
+
+ // paused is true if this trigger is temporarily disabled. Optional.
+ Paused bool `json:"paused,omitempty" protobuf:"varint,3,opt,name=paused"`
}
// BuildTriggerPolicy describes a policy for a single trigger that results in a new Build.
diff --git a/vendor/github.com/openshift/api/build/v1/types_swagger_doc_generated.go b/vendor/github.com/openshift/api/build/v1/types_swagger_doc_generated.go
index fd52fe793bf3..013a3a9a4b2d 100644
--- a/vendor/github.com/openshift/api/build/v1/types_swagger_doc_generated.go
+++ b/vendor/github.com/openshift/api/build/v1/types_swagger_doc_generated.go
@@ -440,6 +440,7 @@ var map_ImageChangeTrigger = map[string]string{
"": "ImageChangeTrigger allows builds to be triggered when an ImageStream changes",
"lastTriggeredImageID": "lastTriggeredImageID is used internally by the ImageChangeController to save last used image ID for build",
"from": "from is a reference to an ImageStreamTag that will trigger a build when updated It is optional. If no From is specified, the From image from the build strategy will be used. Only one ImageChangeTrigger with an empty From reference is allowed in a build configuration.",
+ "paused": "paused is true if this trigger is temporarily disabled. Optional.",
}
func (ImageChangeTrigger) SwaggerDoc() map[string]string {
diff --git a/vendor/github.com/openshift/imagebuilder/Makefile b/vendor/github.com/openshift/imagebuilder/Makefile
new file mode 100644
index 000000000000..e9034286fe9c
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/Makefile
@@ -0,0 +1,11 @@
+build:
+ go build ./cmd/imagebuilder
+.PHONY: build
+
+test:
+ go test ./...
+.PHONY: test
+
+test-conformance:
+ go test -v -tags conformance -timeout 10m ./dockerclient
+.PHONY: test-conformance
diff --git a/vendor/github.com/openshift/imagebuilder/dispatchers_test.go b/vendor/github.com/openshift/imagebuilder/dispatchers_test.go
new file mode 100644
index 000000000000..0e162440b106
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dispatchers_test.go
@@ -0,0 +1,35 @@
+package imagebuilder
+
+import (
+ "reflect"
+ "testing"
+
+ docker "github.com/fsouza/go-dockerclient"
+)
+
+func TestDispatchCopy(t *testing.T) {
+ mybuilder := Builder{
+ RunConfig: docker.Config{
+ WorkingDir: "/root",
+ Cmd: []string{"/bin/sh"},
+ Image: "alpine",
+ },
+ }
+ args := []string{"/go/src/github.com/kubernetes-incubator/service-catalog/controller-manager", "."}
+ flagArgs := []string{"--from=builder"}
+ original := "COPY --from=builder /go/src/github.com/kubernetes-incubator/service-catalog/controller-manager ."
+ if err := dispatchCopy(&mybuilder, args, nil, flagArgs, original); err != nil {
+ t.Errorf("dispatchCopy error: %v", err)
+ }
+ expectedPendingCopies := []Copy{
+ {
+ From: "builder",
+ Src: []string{"/go/src/github.com/kubernetes-incubator/service-catalog/controller-manager"},
+ Dest: "/root/", // destination must contain a trailing slash
+ Download: false,
+ },
+ }
+ if !reflect.DeepEqual(mybuilder.PendingCopies, expectedPendingCopies) {
+ t.Errorf("Expected %v, got %v\n", expectedPendingCopies, mybuilder.PendingCopies)
+ }
+}
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/archive.go b/vendor/github.com/openshift/imagebuilder/dockerclient/archive.go
index 743bdaef841e..ee5ca9a30e51 100644
--- a/vendor/github.com/openshift/imagebuilder/dockerclient/archive.go
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/archive.go
@@ -165,9 +165,17 @@ func archiveFromDisk(directory string, src, dst string, allowDownload bool, excl
return nil, nil, err
}
+ // special case when we are archiving a single file at the root
+ if len(infos) == 1 && !infos[0].FileInfo.IsDir() && (infos[0].Path == "." || infos[0].Path == "/") {
+ glog.V(5).Infof("Archiving a file instead of a directory from %s", directory)
+ infos[0].Path = filepath.Base(directory)
+ infos[0].FromDir = false
+ directory = filepath.Dir(directory)
+ }
+
options := archiveOptionsFor(infos, dst, excludes)
- glog.V(4).Infof("Tar of directory %s %#v", directory, options)
+ glog.V(4).Infof("Tar of %s %#v", directory, options)
rc, err := archive.TarWithOptions(directory, options)
return rc, rc, err
}
@@ -252,12 +260,7 @@ func archiveFromFile(file string, src, dst string, excludes []string) (io.Reader
}
}
- // TODO: multiple sources also require treating dst as a directory
- isDestDir := strings.HasSuffix(dst, "/") || path.Base(dst) == "."
- dst = path.Clean(dst)
- mapperFn := archivePathMapper(src, dst, isDestDir)
-
- pm, err := fileutils.NewPatternMatcher(excludes)
+ mapper, _, err := newArchiveMapper(src, dst, excludes, true)
if err != nil {
return nil, nil, err
}
@@ -266,118 +269,173 @@ func archiveFromFile(file string, src, dst string, excludes []string) (io.Reader
if err != nil {
return nil, nil, err
}
+
+ r, err := transformArchive(f, true, mapper.Filter)
+ return r, f, err
+}
+
+func archiveFromContainer(in io.Reader, src, dst string, excludes []string) (io.Reader, string, error) {
+ mapper, archiveRoot, err := newArchiveMapper(src, dst, excludes, false)
+ if err != nil {
+ return nil, "", err
+ }
+
+ r, err := transformArchive(in, false, mapper.Filter)
+ return r, archiveRoot, err
+}
+
+func transformArchive(r io.Reader, compressed bool, fn TransformFileFunc) (io.Reader, error) {
pr, pw := io.Pipe()
go func() {
- in, err := archive.DecompressStream(f)
- if err != nil {
- pw.CloseWithError(err)
- return
- }
- err = FilterArchive(in, pw, func(h *tar.Header, r io.Reader) ([]byte, bool, bool, error) {
- h.Uid, h.Gid = 0, 0
- // skip a file if it doesn't match the src
- isDir := h.Typeflag == tar.TypeDir
- newName, ok := mapperFn(h.Name, isDir)
- if !ok {
- return nil, false, true, err
- }
- if newName == "." {
- return nil, false, true, nil
- }
-
- // skip based on excludes
- if ok, _ := pm.Matches(h.Name); ok {
- return nil, false, true, nil
+ if compressed {
+ in, err := archive.DecompressStream(r)
+ if err != nil {
+ pw.CloseWithError(err)
+ return
}
-
- h.Name = newName
- // include all files
- return nil, false, false, nil
- })
+ r = in
+ }
+ err := FilterArchive(r, pw, fn)
pw.CloseWithError(err)
}()
- return pr, closers{f.Close, pr.Close}, nil
+ return pr, nil
}
-func archiveFromContainer(in io.Reader, src, dst string, excludes []string) (io.Reader, io.Closer, error) {
- // TODO: multiple sources also require treating dst as a directory
- isDestDir := strings.HasSuffix(dst, "/") || path.Base(dst) == "."
- dst = path.Clean(dst)
- mapperFn := archivePathMapper("*", dst, isDestDir)
+type archiveMapper struct {
+ exclude *fileutils.PatternMatcher
+ rename func(name string, isDir bool) (string, bool)
+ prefix string
+ resetOwners bool
+}
- pm, err := fileutils.NewPatternMatcher(excludes)
+func newArchiveMapper(src, dst string, excludes []string, resetOwners bool) (*archiveMapper, string, error) {
+ ex, err := fileutils.NewPatternMatcher(excludes)
if err != nil {
- return nil, nil, err
+ return nil, "", err
}
- var srcName string
- if !strings.HasSuffix(src, "/") && path.Base(src) != "." {
- srcName = path.Base(src)
+ isDestDir := strings.HasSuffix(dst, "/") || path.Base(dst) == "."
+ dst = path.Clean(dst)
+
+ var prefix string
+ archiveRoot := src
+ srcPattern := "*"
+ switch {
+ case src == "":
+ return nil, "", fmt.Errorf("source may not be empty")
+ case src == ".", src == "/":
+ // no transformation necessary
+ case strings.HasSuffix(src, "/"), strings.HasSuffix(src, "/."):
+ src = path.Clean(src)
+ archiveRoot = src
+ if archiveRoot != "/" && archiveRoot != "." {
+ prefix = path.Base(archiveRoot)
+ }
+ default:
+ src = path.Clean(src)
+ srcPattern = path.Base(src)
+ archiveRoot = path.Dir(src)
+ if archiveRoot != "/" && archiveRoot != "." {
+ prefix = path.Base(archiveRoot)
+ }
}
- pr, pw := io.Pipe()
- go func() {
- err = FilterArchive(in, pw, func(h *tar.Header, r io.Reader) ([]byte, bool, bool, error) {
- isDir := h.Typeflag == tar.TypeDir
- // special case: container output tar has a single file matching the src name
- if !isDir && h.Name == srcName && !isDestDir {
- h.Name = dst
- return nil, false, false, nil
- }
- // skip a file if it doesn't match the src
- newName, ok := mapperFn(h.Name, isDir)
- if !ok {
- return nil, false, true, err
- }
- if newName == "." {
- return nil, false, true, nil
- }
+ mapperFn := archivePathMapper(srcPattern, dst, isDestDir)
- // skip based on excludes
- if ok, _ := pm.Matches(h.Name); ok {
- return nil, false, true, nil
- }
+ return &archiveMapper{
+ exclude: ex,
+ rename: mapperFn,
+ prefix: prefix,
+ resetOwners: resetOwners,
+ }, archiveRoot, nil
+}
- glog.V(5).Infof("Filtering archive %s -> %s", h.Name, newName)
- h.Name = newName
- // include all files
- return nil, false, false, nil
- })
- pw.CloseWithError(err)
- }()
- return pr, pr, nil
+func (m *archiveMapper) Filter(h *tar.Header, r io.Reader) ([]byte, bool, bool, error) {
+ if m.resetOwners {
+ h.Uid, h.Gid = 0, 0
+ }
+ // Trim a leading path, the prefix segment (which has no leading or trailing slashes), and
+ // the final leader segment. Depending on the segment, Docker could return /prefix/ or prefix/.
+ h.Name = strings.TrimPrefix(h.Name, "/")
+ if !strings.HasPrefix(h.Name, m.prefix) {
+ return nil, false, true, nil
+ }
+ h.Name = strings.TrimPrefix(strings.TrimPrefix(h.Name, m.prefix), "/")
+
+ // skip a file if it doesn't match the src
+ isDir := h.Typeflag == tar.TypeDir
+ newName, ok := m.rename(h.Name, isDir)
+ if !ok {
+ return nil, false, true, nil
+ }
+ if newName == "." {
+ return nil, false, true, nil
+ }
+ // skip based on excludes
+ if ok, _ := m.exclude.Matches(h.Name); ok {
+ return nil, false, true, nil
+ }
+ h.Name = newName
+ // include all files
+ return nil, false, false, nil
}
func archiveOptionsFor(infos []CopyInfo, dst string, excludes []string) *archive.TarOptions {
dst = trimLeadingPath(dst)
+ dstIsDir := strings.HasSuffix(dst, "/") || dst == "." || dst == "/" || strings.HasSuffix(dst, "/.")
+ dst = trimTrailingSlash(dst)
+ dstIsRoot := dst == "." || dst == "/"
+
options := &archive.TarOptions{
ChownOpts: &idtools.IDPair{UID: 0, GID: 0},
}
+
pm, err := fileutils.NewPatternMatcher(excludes)
if err != nil {
return options
}
+
for _, info := range infos {
if ok, _ := pm.Matches(info.Path); ok {
continue
}
- options.IncludeFiles = append(options.IncludeFiles, info.Path)
+
+ srcIsDir := strings.HasSuffix(info.Path, "/") || info.Path == "." || info.Path == "/" || strings.HasSuffix(info.Path, "/.")
+ infoPath := trimTrailingSlash(info.Path)
+
+ options.IncludeFiles = append(options.IncludeFiles, infoPath)
if len(dst) == 0 {
continue
}
if options.RebaseNames == nil {
options.RebaseNames = make(map[string]string)
}
- if info.FromDir || strings.HasSuffix(dst, "/") || strings.HasSuffix(dst, "/.") || dst == "." {
- if strings.HasSuffix(info.Path, "/") {
- options.RebaseNames[info.Path] = dst
- } else {
- options.RebaseNames[info.Path] = path.Join(dst, path.Base(info.Path))
- }
- } else {
- options.RebaseNames[info.Path] = dst
+
+ glog.V(6).Infof("len=%d info.FromDir=%t info.IsDir=%t dstIsRoot=%t dstIsDir=%t srcIsDir=%t", len(infos), info.FromDir, info.IsDir(), dstIsRoot, dstIsDir, srcIsDir)
+ switch {
+ case len(infos) > 1 && dstIsRoot:
+ // copying multiple things into root, no rename necessary ([Dockerfile, dir] -> [Dockerfile, dir])
+ case len(infos) > 1:
+ // put each input into the target, which is assumed to be a directory ([Dockerfile, dir] -> [a/Dockerfile, a/dir])
+ options.RebaseNames[infoPath] = path.Join(dst, path.Base(infoPath))
+ case info.FileInfo.IsDir() && dstIsDir:
+ // mapping a directory to an explicit directory ([dir] -> [a])
+ options.RebaseNames[infoPath] = dst
+ case info.FileInfo.IsDir():
+ // mapping a directory to an implicit directory ([Dockerfile] -> [dir/Dockerfile])
+ options.RebaseNames[infoPath] = path.Join(dst, path.Base(infoPath))
+ case info.FromDir:
+ // this is a file that was part of an explicit directory request, no transformation
+ options.RebaseNames[infoPath] = path.Join(dst, path.Base(infoPath))
+ case dstIsDir:
+ // mapping what is probably a file to a non-root directory ([Dockerfile] -> [dir/Dockerfile])
+ options.RebaseNames[infoPath] = path.Join(dst, path.Base(infoPath))
+ default:
+ // a single file mapped to another single file ([Dockerfile] -> [Dockerfile.2])
+ options.RebaseNames[infoPath] = dst
}
}
+
options.ExcludePatterns = excludes
return options
}
@@ -390,3 +448,29 @@ func sourceToDestinationName(src, dst string, forceDir bool) string {
return dst
}
}
+
+// logArchiveOutput prints log info about the provided tar file as it is streamed. If an
+// error occurs the remainder of the pipe is read to prevent blocking.
+func logArchiveOutput(r io.Reader, prefix string) {
+ pr, pw := io.Pipe()
+ r = ioutil.NopCloser(io.TeeReader(r, pw))
+ go func() {
+ err := func() error {
+ tr := tar.NewReader(pr)
+ for {
+ h, err := tr.Next()
+ if err != nil {
+ return err
+ }
+ glog.Infof("%s %s (%d %s)", prefix, h.Name, h.Size, h.FileInfo().Mode())
+ if _, err := io.Copy(ioutil.Discard, tr); err != nil {
+ return err
+ }
+ }
+ }()
+ if err != io.EOF {
+ glog.Infof("%s: unable to log archive output: %v", prefix, err)
+ io.Copy(ioutil.Discard, pr)
+ }
+ }()
+}
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/archive_test.go b/vendor/github.com/openshift/imagebuilder/dockerclient/archive_test.go
index 5d38da0ad209..89961c769b63 100644
--- a/vendor/github.com/openshift/imagebuilder/dockerclient/archive_test.go
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/archive_test.go
@@ -13,6 +13,49 @@ import (
"github.com/docker/docker/pkg/archive"
)
+type archiveGenerator struct {
+ Headers []*tar.Header
+}
+
+func newArchiveGenerator() *archiveGenerator {
+ return &archiveGenerator{}
+}
+
+func (g *archiveGenerator) File(name string) *archiveGenerator {
+ g.Headers = append(g.Headers, &tar.Header{Name: name, Size: 1})
+ return g
+}
+
+func (g *archiveGenerator) Dir(name string) *archiveGenerator {
+ g.Headers = append(g.Headers, &tar.Header{Name: name, Typeflag: tar.TypeDir})
+ return g
+}
+
+func (g *archiveGenerator) Reader() io.Reader {
+ pr, pw := io.Pipe()
+ go func() {
+ err := func() error {
+ w := tar.NewWriter(pw)
+ for _, h := range g.Headers {
+ if err := w.WriteHeader(h); err != nil {
+ return err
+ }
+ if h.Typeflag&tar.TypeDir == tar.TypeDir {
+ continue
+ }
+ for i := int64(0); i < h.Size; i++ {
+ if _, err := w.Write([]byte{byte(i)}); err != nil {
+ return err
+ }
+ }
+ }
+ return w.Flush()
+ }()
+ pw.CloseWithError(err)
+ }()
+ return pr
+}
+
func Test_archiveFromFile(t *testing.T) {
f, err := ioutil.TempFile("", "test-tar")
if err != nil {
@@ -33,6 +76,7 @@ func Test_archiveFromFile(t *testing.T) {
testArchive := f.Name()
testCases := []struct {
file string
+ gen *archiveGenerator
src string
dst string
excludes []string
@@ -213,3 +257,184 @@ func Test_archiveFromFile(t *testing.T) {
})
}
}
+
+func Test_archiveFromContainer(t *testing.T) {
+ testCases := []struct {
+ gen *archiveGenerator
+ src string
+ dst string
+ excludes []string
+ expect []string
+ path string
+ }{
+ {
+ gen: newArchiveGenerator().File("file").Dir("test").File("test/file2"),
+ src: "/*",
+ dst: "test",
+ path: "/",
+ expect: []string{
+ "test/file",
+ "test/test",
+ "test/test/file2",
+ },
+ },
+ {
+ gen: newArchiveGenerator().File("file").Dir("test").File("test/file2"),
+ src: "/",
+ dst: "test",
+ path: "/",
+ expect: []string{
+ "test/file",
+ "test/test",
+ "test/test/file2",
+ },
+ },
+ {
+ gen: newArchiveGenerator().File("file").Dir("test").File("test/file2"),
+ src: ".",
+ dst: "test",
+ path: ".",
+ expect: []string{
+ "test/file",
+ "test/test",
+ "test/test/file2",
+ },
+ },
+ {
+ gen: newArchiveGenerator().File("file").Dir("test").File("test/file2"),
+ src: ".",
+ dst: "test/",
+ path: ".",
+ expect: []string{
+ "test/file",
+ "test/test",
+ "test/test/file2",
+ },
+ },
+ {
+ gen: newArchiveGenerator().File("file").Dir("test").File("test/file2"),
+ src: ".",
+ dst: "/test",
+ path: ".",
+ expect: []string{
+ "/test/file",
+ "/test/test",
+ "/test/test/file2",
+ },
+ },
+ {
+ gen: newArchiveGenerator().File("file").Dir("test").File("test/file2"),
+ src: ".",
+ dst: "/test/",
+ path: ".",
+ expect: []string{
+ "/test/file",
+ "/test/test",
+ "/test/test/file2",
+ },
+ },
+ {
+ gen: newArchiveGenerator().File("b/file").Dir("b/test").File("b/test/file2"),
+ src: "/a/b/",
+ dst: "/b",
+ path: "/a/b",
+ expect: []string{
+ "/b/file",
+ "/b/test",
+ "/b/test/file2",
+ },
+ },
+ {
+ gen: newArchiveGenerator().File("/b/file").Dir("/b/test").File("/b/test/file2"),
+ src: "/a/b/*",
+ dst: "/b",
+ path: "/a/b",
+ expect: []string{
+ "/b/file",
+ "/b/test",
+ "/b/test/file2",
+ },
+ },
+
+ // DownloadFromContainer returns tar archive paths prefixed with a slash when
+ // the base directory is the root
+ {
+ gen: newArchiveGenerator().File("/a").Dir("/b").File("/b/1"),
+ src: "/a",
+ dst: "/",
+ path: "/",
+ expect: []string{
+ "/a",
+ },
+ },
+ {
+ gen: newArchiveGenerator().File("/a").Dir("/b").File("/b/1"),
+ src: "/a",
+ dst: "/a",
+ path: "/",
+ expect: []string{
+ "/a",
+ },
+ },
+ {
+ gen: newArchiveGenerator().Dir("b/").File("b/1").File("b/2"),
+ src: "/a/b/",
+ dst: "/b/",
+ path: "/a/b",
+ expect: []string{
+ "/b",
+ "/b/1",
+ "/b/2",
+ },
+ },
+ {
+ gen: newArchiveGenerator().Dir("").File("b"),
+ src: "/a/b",
+ dst: "/a",
+ path: "/a",
+ expect: nil,
+ },
+ {
+ gen: newArchiveGenerator().Dir("a/").File("a/b"),
+ src: "/a/b",
+ dst: "/a",
+ path: "/a",
+ expect: []string{
+ "/a",
+ },
+ },
+ }
+ for i := range testCases {
+ testCase := testCases[i]
+ t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
+ r, path, err := archiveFromContainer(
+ testCase.gen.Reader(),
+ testCase.src,
+ testCase.dst,
+ testCase.excludes,
+ )
+ if err != nil {
+ t.Fatal(err)
+ }
+ if path != testCase.path {
+ t.Errorf("unexpected path: %s", path)
+ }
+ tr := tar.NewReader(r)
+ var found []string
+ for {
+ h, err := tr.Next()
+ if err != nil {
+ if err == io.EOF {
+ break
+ }
+ t.Fatal(err)
+ }
+ found = append(found, h.Name)
+ }
+ sort.Strings(found)
+ if !reflect.DeepEqual(testCase.expect, found) {
+ t.Errorf("unexpected files:\n%v\n%v", testCase.expect, found)
+ }
+ })
+ }
+}
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/client.go b/vendor/github.com/openshift/imagebuilder/dockerclient/client.go
index a278f1c8bc91..4340191c51f0 100644
--- a/vendor/github.com/openshift/imagebuilder/dockerclient/client.go
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/client.go
@@ -458,7 +458,7 @@ func (e *ClientExecutor) PopulateTransientMounts(opts docker.CreateContainerOpti
source := mount.SourcePath
copies = append(copies, imagebuilder.Copy{
FromFS: true,
- Src: []string{source},
+ Src: []string{filepath.Join(e.Directory, source)},
Dest: filepath.Join(e.ContainerTransientMount, strconv.Itoa(i)),
})
}
@@ -730,7 +730,7 @@ func (e *ClientExecutor) CopyContainer(container *docker.Container, excludes []s
for _, c := range copies {
// TODO: reuse source
for _, src := range c.Src {
- glog.V(4).Infof("Archiving %s %t", src, c.Download)
+ glog.V(4).Infof("Archiving %s download=%t fromFS=%t from=%s", src, c.Download, c.FromFS, c.From)
var r io.Reader
var closer io.Closer
var err error
@@ -744,6 +744,9 @@ func (e *ClientExecutor) CopyContainer(container *docker.Container, excludes []s
}
glog.V(5).Infof("Uploading to %s at %s", container.ID, c.Dest)
+ if glog.V(6) {
+ logArchiveOutput(r, "Archive file for %s")
+ }
err = e.Client.UploadToContainer(container.ID, docker.UploadToContainerOptions{
InputStream: r,
Path: "/",
@@ -777,6 +780,7 @@ func (e *ClientExecutor) archiveFromContainer(from string, src, dst string) (io.
if other.Container == nil {
return nil, nil, fmt.Errorf("the stage %q has not been built yet", from)
}
+ glog.V(5).Infof("Using container %s as input for archive request", other.Container.ID)
containerID = other.Container.ID
} else {
glog.V(5).Infof("Creating a container temporarily for image input from %q in %s", from, src)
@@ -797,19 +801,20 @@ func (e *ClientExecutor) archiveFromContainer(from string, src, dst string) (io.
}
pr, pw := io.Pipe()
- ar, arclose, err := archiveFromContainer(pr, src, dst, nil)
+ ar, archiveRoot, err := archiveFromContainer(pr, src, dst, nil)
if err != nil {
pr.Close()
return nil, nil, err
}
go func() {
+ glog.V(6).Infof("Download from container %s at path %s", containerID, archiveRoot)
err := e.Client.DownloadFromContainer(containerID, docker.DownloadFromContainerOptions{
OutputStream: pw,
- Path: src,
+ Path: archiveRoot,
})
pw.CloseWithError(err)
}()
- return ar, closers{pr.Close, arclose.Close}, nil
+ return ar, pr, nil
}
// TODO: this does not support decompressing nested archives for ADD (when the source is a compressed file)
@@ -818,17 +823,21 @@ func (e *ClientExecutor) Archive(fromFS bool, src, dst string, allowDownload boo
if !allowDownload {
return nil, nil, fmt.Errorf("source can't be a URL")
}
+ glog.V(5).Infof("Archiving %s -> %s from URL", src, dst)
return archiveFromURL(src, dst, e.TempDir)
}
// the input is from the filesystem, use the source as the input
if fromFS {
+ glog.V(5).Infof("Archiving %s %s -> %s from a filesystem location", src, ".", dst)
return archiveFromDisk(src, ".", dst, allowDownload, excludes)
}
// if the context is in archive form, read from it without decompressing
if len(e.ContextArchive) > 0 {
+ glog.V(5).Infof("Archiving %s %s -> %s from context archive", e.ContextArchive, src, dst)
return archiveFromFile(e.ContextArchive, src, dst, excludes)
}
// if the context is a directory, we only allow relative includes
+ glog.V(5).Infof("Archiving %q %q -> %q from disk", e.Directory, src, dst)
return archiveFromDisk(e.Directory, src, dst, allowDownload, excludes)
}
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/conformance_test.go b/vendor/github.com/openshift/imagebuilder/dockerclient/conformance_test.go
index f000ed573e09..b34f6bc326fa 100644
--- a/vendor/github.com/openshift/imagebuilder/dockerclient/conformance_test.go
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/conformance_test.go
@@ -60,7 +60,7 @@ func TestMount(t *testing.T) {
out := &bytes.Buffer{}
e.Out, e.ErrOut = out, out
- e.Directory = tmpDir
+ e.Directory = "."
e.Tag = filepath.Base(tmpDir)
e.TransientMounts = []Mount{
{SourcePath: "testdata/volume/", DestinationPath: "/tmp/test"},
@@ -87,6 +87,69 @@ func TestMount(t *testing.T) {
}
}
+func TestCopyFrom(t *testing.T) {
+ c, err := docker.NewClientFromEnv()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ testCases := []struct {
+ name string
+ create string
+ copy string
+ extra string
+ expect string
+ }{
+ {name: "copy file to root", create: "touch /a /b", copy: "/a /", expect: "[[ -f /a ]]"},
+ {name: "copy file to same file", create: "touch /a", copy: "/a /a", expect: "[[ -f /a ]]"},
+ {name: "copy file to workdir", create: "touch /a", extra: "WORKDIR /b", copy: "/a .", expect: "[[ -f /b/a ]]"},
+ {name: "copy file to workdir rename", create: "touch /a", extra: "WORKDIR /b", copy: "/a ./b", expect: "[[ -f /b/b ]]"},
+ {name: "copy folder contents to higher level", create: "mkdir -p /a/b && touch /a/b/1 /a/b/2", copy: "/a/b/ /b/", expect: "[[ -f /b/1 && -f /b/2 && ! -e /a ]]"},
+ {name: "copy wildcard folder contents to higher level", create: "mkdir -p /a/b && touch /a/b/1 /a/b/2", copy: "/a/b/* /b/", expect: "ls -al /b/1 /b/2 /b && ! ls -al /a /b/a /b/b"},
+ {name: "copy folder with dot contents to higher level", create: "mkdir -p /a/b && touch /a/b/1 /a/b/2", copy: "/a/b/. /b/", expect: "ls -al /b/1 /b/2 /b && ! ls -al /a /b/a /b/b"},
+ {name: "copy root file to different root name", create: "touch /b", copy: "/b /a", expect: "ls -al /a && ! ls -al /b"},
+ {name: "copy nested file to different root name", create: "mkdir -p /a && touch /a/b", copy: "/a/b /a", expect: "ls -al /a && ! ls -al /b"},
+ {name: "copy file to deeper directory with explicit slash", create: "mkdir -p /a && touch /a/1", copy: "/a/1 /a/b/c/", expect: "ls -al /a/b/c/1 && ! ls -al /a/b/1"},
+ {name: "copy file to deeper directory without explicit slash", create: "mkdir -p /a && touch /a/1", copy: "/a/1 /a/b/c", expect: "ls -al /a/b/c && ! ls -al /a/b/1"},
+ {name: "copy directory to deeper directory without explicit slash", create: "mkdir -p /a && touch /a/1", copy: "/a /a/b/c", expect: "ls -al /a/b/c/1 && ! ls -al /a/b/1"},
+ }
+ for i, testCase := range testCases {
+ name := fmt.Sprintf("%d", i)
+ if len(testCase.name) > 0 {
+ name = testCase.name
+ }
+ test := testCase
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+ e := NewClientExecutor(c)
+ defer e.Release()
+
+ out := &bytes.Buffer{}
+ e.Out, e.ErrOut = out, out
+ b := imagebuilder.NewBuilder(nil)
+ dockerfile := fmt.Sprintf(`
+ FROM busybox AS base
+ RUN %s
+ FROM busybox
+ %s
+ COPY --from=base %s
+ RUN %s
+ `, test.create, test.extra, test.copy, test.expect,
+ )
+ t.Log(dockerfile)
+ node, err := imagebuilder.ParseDockerfile(strings.NewReader(dockerfile))
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ stages := imagebuilder.NewStages(node, b)
+ if _, err := e.Stages(b, stages, ""); err != nil {
+ t.Fatal(err)
+ }
+ })
+ }
+}
+
func TestShell(t *testing.T) {
tmpDir, err := ioutil.TempDir("", "dockerbuild-conformance-")
if err != nil {
@@ -188,6 +251,16 @@ func TestConformanceInternal(t *testing.T) {
Name: "directory",
ContextDir: "testdata/dir",
},
+ {
+ Name: "directory with slash",
+ ContextDir: "testdata/overlapdir",
+ Dockerfile: "Dockerfile.with_slash",
+ },
+ {
+ Name: "directory without slash",
+ ContextDir: "testdata/overlapdir",
+ Dockerfile: "Dockerfile.without_slash",
+ },
// TODO: Fix this test
// {
// ContextDir: "testdata/ignore",
@@ -423,19 +496,23 @@ func conformanceTester(t *testing.T, c *docker.Client, test conformanceTest, i i
}
}
- case len(test.Dockerfile) > 0:
+ case len(test.ContextDir) > 0:
+ input = filepath.Join(test.ContextDir, dockerfile)
+ dockerfilePath = filepath.Join(test.ContextDir, "Dockerfile")
+ contextDir = test.ContextDir
+ dir = test.ContextDir
+
+ if len(test.Dockerfile) > 0 {
+ dockerfilePath = filepath.Join(dir, test.Dockerfile)
+ }
+
+ default:
input = dockerfile
dockerfilePath = filepath.Join(dir, "Dockerfile")
if _, err := fileutils.CopyFile(filepath.Join("", dockerfile), dockerfilePath); err != nil {
t.Fatal(err)
}
dockerfile = "Dockerfile"
-
- default:
- input = filepath.Join(test.ContextDir, dockerfile)
- dockerfilePath = input
- contextDir = test.ContextDir
- dir = test.ContextDir
}
// read the dockerfile
@@ -718,20 +795,20 @@ func equivalentImages(t *testing.T, c *docker.Client, a, b string, testFilesyste
return false
}
for k, v := range differs {
- if ignoreFuncs(ignoreFns).Ignore(v[0], v[1]) {
+ if ignoreFuncs(ignoreFns).Ignore(v[0].Header, v[1].Header) {
delete(differs, k)
continue
}
- t.Errorf("%s %s differs:\n%#v\n%#v", a, k, v[0], v[1])
+ t.Errorf("%s %s differs:\n%#v\n%#v", a, k, v[0].Header, v[1].Header)
}
for k, v := range onlyA {
- if ignoreFuncs(ignoreFns).Ignore(v, nil) {
+ if ignoreFuncs(ignoreFns).Ignore(v.Header, nil) {
delete(onlyA, k)
continue
}
}
for k, v := range onlyB {
- if ignoreFuncs(ignoreFns).Ignore(nil, v) {
+ if ignoreFuncs(ignoreFns).Ignore(nil, v.Header) {
delete(onlyB, k)
continue
}
@@ -812,7 +889,7 @@ func ignoreDockerfileSize(dockerfile string) ignoreFunc {
// compareImageFS exports the file systems of two images and returns a map
// of files that differ in any way (modification time excluded), only exist in
// image A, or only existing in image B.
-func compareImageFS(c *docker.Client, a, b string) (differ map[string][]*tar.Header, onlyA, onlyB map[string]*tar.Header, err error) {
+func compareImageFS(c *docker.Client, a, b string) (differ map[string][]tarHeader, onlyA, onlyB map[string]tarHeader, err error) {
fsA, err := imageFSMetadata(c, a)
if err != nil {
return nil, nil, nil, err
@@ -821,8 +898,8 @@ func compareImageFS(c *docker.Client, a, b string) (differ map[string][]*tar.Hea
if err != nil {
return nil, nil, nil, err
}
- differ = make(map[string][]*tar.Header)
- onlyA = make(map[string]*tar.Header)
+ differ = make(map[string][]tarHeader)
+ onlyA = make(map[string]tarHeader)
onlyB = fsB
for k, v1 := range fsA {
v2, ok := fsB[k]
@@ -835,14 +912,26 @@ func compareImageFS(c *docker.Client, a, b string) (differ map[string][]*tar.Hea
v1.ModTime = time.Time{}
v2.ModTime = time.Time{}
if !reflect.DeepEqual(v1, v2) {
- differ[k] = []*tar.Header{v1, v2}
+ differ[k] = []tarHeader{v1, v2}
}
}
return differ, onlyA, onlyB, nil
}
+type tarHeader struct {
+ *tar.Header
+}
+
+func (h tarHeader) String() string {
+ th := h.Header
+ if th == nil {
+ return "nil"
+ }
+ return fmt.Sprintf("<%d %s>", th.Size, th.FileInfo().Mode())
+}
+
// imageFSMetadata creates a container and reads the filesystem metadata out of the archive.
-func imageFSMetadata(c *docker.Client, name string) (map[string]*tar.Header, error) {
+func imageFSMetadata(c *docker.Client, name string) (map[string]tarHeader, error) {
container, err := c.CreateContainer(docker.CreateContainerOptions{Name: name + "-export", Config: &docker.Config{Image: name}})
if err != nil {
return nil, err
@@ -850,7 +939,7 @@ func imageFSMetadata(c *docker.Client, name string) (map[string]*tar.Header, err
defer c.RemoveContainer(docker.RemoveContainerOptions{ID: container.ID, RemoveVolumes: true, Force: true})
ch := make(chan struct{})
- result := make(map[string]*tar.Header)
+ result := make(map[string]tarHeader)
r, w := io.Pipe()
go func() {
defer close(ch)
@@ -865,7 +954,7 @@ func imageFSMetadata(c *docker.Client, name string) (map[string]*tar.Header, err
}
break
}
- result[h.Name] = h
+ result[h.Name] = tarHeader{h}
}
}()
if err := c.ExportContainer(docker.ExportContainerOptions{ID: container.ID, OutputStream: w}); err != nil {
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/copyinfo.go b/vendor/github.com/openshift/imagebuilder/dockerclient/copyinfo.go
index 59438916fdf0..5d287aa24f9e 100644
--- a/vendor/github.com/openshift/imagebuilder/dockerclient/copyinfo.go
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/copyinfo.go
@@ -21,6 +21,16 @@ type CopyInfo struct {
// CalcCopyInfo identifies the source files selected by a Dockerfile ADD or COPY instruction.
func CalcCopyInfo(origPath, rootPath string, allowWildcards bool) ([]CopyInfo, error) {
+ explicitDir := origPath == "." || origPath == "/" || strings.HasSuffix(origPath, "/.") || strings.HasSuffix(origPath, "/")
+ // all CopyInfo resulting from this call will have FromDir set to explicitDir
+ infos, err := calcCopyInfo(origPath, rootPath, allowWildcards, explicitDir)
+ if err != nil {
+ return nil, err
+ }
+ return infos, nil
+}
+
+func calcCopyInfo(origPath, rootPath string, allowWildcards, explicitDir bool) ([]CopyInfo, error) {
origPath = trimLeadingPath(origPath)
// Deal with wildcards
if allowWildcards && containsWildcards(origPath) {
@@ -40,7 +50,7 @@ func CalcCopyInfo(origPath, rootPath string, allowWildcards bool) ([]CopyInfo, e
// Note we set allowWildcards to false in case the name has
// a * in it
- subInfos, err := CalcCopyInfo(trimLeadingPath(strings.TrimPrefix(path, rootPath)), rootPath, false)
+ subInfos, err := calcCopyInfo(trimLeadingPath(strings.TrimPrefix(path, rootPath)), rootPath, false, explicitDir)
if err != nil {
return err
}
@@ -52,27 +62,31 @@ func CalcCopyInfo(origPath, rootPath string, allowWildcards bool) ([]CopyInfo, e
return copyInfos, nil
}
+ // Must be a dir or a file
+ fi, err := os.Stat(filepath.Join(rootPath, origPath))
+ if err != nil {
+ return nil, err
+ }
+
// flatten the root directory so we can rebase it
if origPath == "." {
+ if !fi.IsDir() {
+ // we want to mount a single file as input
+ return []CopyInfo{{FileInfo: fi, Path: origPath, FromDir: false}}, nil
+ }
var copyInfos []CopyInfo
infos, err := ioutil.ReadDir(rootPath)
if err != nil {
return nil, err
}
for _, info := range infos {
- copyInfos = append(copyInfos, CopyInfo{FileInfo: info, Path: info.Name(), FromDir: true})
+ copyInfos = append(copyInfos, CopyInfo{FileInfo: info, Path: info.Name(), FromDir: explicitDir})
}
return copyInfos, nil
}
- // Must be a dir or a file
- fi, err := os.Stat(filepath.Join(rootPath, origPath))
- if err != nil {
- return nil, err
- }
-
origPath = trimTrailingDot(origPath)
- return []CopyInfo{{FileInfo: fi, Path: origPath}}, nil
+ return []CopyInfo{{FileInfo: fi, Path: origPath, FromDir: explicitDir}}, nil
}
func DownloadURL(src, dst, tempDir string) ([]CopyInfo, string, error) {
@@ -131,6 +145,20 @@ func trimLeadingPath(origPath string) string {
return origPath
}
+func ensureTrailingSlash(origPath string) string {
+ if !strings.HasSuffix(origPath, "/") {
+ origPath += "/"
+ }
+ return origPath
+}
+
+func trimTrailingSlash(origPath string) string {
+ if origPath == "/" {
+ return origPath
+ }
+ return strings.TrimSuffix(origPath, "/")
+}
+
func trimTrailingDot(origPath string) string {
if strings.HasSuffix(origPath, string(os.PathSeparator)+".") {
return strings.TrimSuffix(origPath, ".")
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/copyinfo_test.go b/vendor/github.com/openshift/imagebuilder/dockerclient/copyinfo_test.go
index 5b8d92ef9194..7828e3511850 100644
--- a/vendor/github.com/openshift/imagebuilder/dockerclient/copyinfo_test.go
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/copyinfo_test.go
@@ -1,6 +1,7 @@
package dockerclient
import (
+ "fmt"
"reflect"
"testing"
)
@@ -75,6 +76,75 @@ func TestCalcCopyInfo(t *testing.T) {
"subdir": {},
},
},
+ {
+ origPath: ".",
+ dstPath: "copy",
+ rootPath: "testdata/dir",
+ allowWildcards: true,
+ errFn: nilErr,
+ paths: map[string]struct{}{
+ "file": {},
+ "Dockerfile": {},
+ "subdir": {},
+ },
+ rebaseNames: map[string]string{
+ "file": "copy/file",
+ "Dockerfile": "copy/Dockerfile",
+ "subdir": "copy/subdir",
+ },
+ },
+ {
+ origPath: ".",
+ dstPath: "copy",
+ rootPath: "testdata/singlefile",
+ allowWildcards: true,
+ errFn: nilErr,
+ paths: map[string]struct{}{
+ "Dockerfile": {},
+ },
+ rebaseNames: map[string]string{
+ "Dockerfile": "copy/Dockerfile",
+ },
+ },
+ {
+ origPath: "existing/",
+ dstPath: ".",
+ rootPath: "testdata/overlapdir",
+ allowWildcards: true,
+ errFn: nilErr,
+ paths: map[string]struct{}{
+ "existing/": {},
+ },
+ rebaseNames: map[string]string{
+ "existing": ".",
+ },
+ },
+ {
+ origPath: "existing",
+ dstPath: ".",
+ rootPath: "testdata/overlapdir",
+ allowWildcards: true,
+ errFn: nilErr,
+ paths: map[string]struct{}{
+ "existing": {},
+ },
+ rebaseNames: map[string]string{
+ "existing": ".",
+ },
+ },
+ {
+ origPath: "existing",
+ dstPath: "/",
+ rootPath: "testdata/overlapdir",
+ allowWildcards: true,
+ errFn: nilErr,
+ paths: map[string]struct{}{
+ "existing": {},
+ },
+ rebaseNames: map[string]string{
+ "existing": "/",
+ },
+ },
{
origPath: "subdir/.",
rootPath: "testdata/dir",
@@ -112,38 +182,39 @@ func TestCalcCopyInfo(t *testing.T) {
},
dstPath: "test/",
rebaseNames: map[string]string{
- "subdir/": "test/",
+ "subdir": "test",
},
},
}
for i, test := range tests {
- infos, err := CalcCopyInfo(test.origPath, test.rootPath, test.allowWildcards)
- if !test.errFn(err) {
- t.Errorf("%d: unexpected error: %v", i, err)
- continue
- }
- if err != nil {
- continue
- }
- expect := make(map[string]struct{})
- for k := range test.paths {
- expect[k] = struct{}{}
- }
- for _, info := range infos {
- if _, ok := expect[info.Path]; ok {
- delete(expect, info.Path)
- } else {
- t.Errorf("%d: did not expect path %s", i, info.Path)
+ t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
+ infos, err := CalcCopyInfo(test.origPath, test.rootPath, test.allowWildcards)
+ if !test.errFn(err) {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ if err != nil {
+ return
+ }
+ expect := make(map[string]struct{})
+ for k := range test.paths {
+ expect[k] = struct{}{}
+ }
+ for _, info := range infos {
+ if _, ok := expect[info.Path]; ok {
+ delete(expect, info.Path)
+ } else {
+ t.Errorf("did not expect path %s", info.Path)
+ }
+ }
+ if len(expect) > 0 {
+ t.Errorf("did not see paths: %#v", expect)
}
- }
- if len(expect) > 0 {
- t.Errorf("%d: did not see paths: %#v", i, expect)
- }
- options := archiveOptionsFor(infos, test.dstPath, test.excludes)
- if !reflect.DeepEqual(test.rebaseNames, options.RebaseNames) {
- t.Errorf("%d: rebase names did not match: %#v", i, options.RebaseNames)
- }
+ options := archiveOptionsFor(infos, test.dstPath, test.excludes)
+ if !reflect.DeepEqual(test.rebaseNames, options.RebaseNames) {
+ t.Errorf("rebase names did not match:\n%#v\n%#v", test.rebaseNames, options.RebaseNames)
+ }
+ })
}
}
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_1 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_1
new file mode 100644
index 000000000000..9ee899789aea
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_1
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN touch /a /b
+FROM busybox
+COPY --from=base /a /
+RUN ls -al /a
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_10 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_10
new file mode 100644
index 000000000000..986e67e6ad88
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_10
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN mkdir -p /a && touch /a/1
+FROM busybox
+COPY --from=base /a/1 /a/b/c
+RUN ls -al /a/b/c && ! ls -al /a/b/1
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_2 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_2
new file mode 100644
index 000000000000..11f7f860e454
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_2
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN touch /a
+FROM busybox
+COPY --from=base /a /a
+RUN ls -al /a
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_3 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_3
new file mode 100644
index 000000000000..1e3e701a0897
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_3
@@ -0,0 +1,6 @@
+FROM busybox as base
+RUN touch /a
+FROM busybox
+WORKDIR /b
+COPY --from=base /a .
+RUN ls -al /b/a
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_4 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_4
new file mode 100644
index 000000000000..e00201fdec36
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_4
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN mkdir -p /a/b && touch /a/b/1 /a/b/2
+FROM busybox
+COPY --from=base /a/b/ /b/
+RUN ls -al /b/1 /b/2 /b && ! ls -al /a
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_5 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_5
new file mode 100644
index 000000000000..c5e298d04e97
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_5
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN mkdir -p /a/b && touch /a/b/1 /a/b/2
+FROM busybox
+COPY --from=base /a/b/* /b/
+RUN ls -al /b/1 /b/2 /b && ! ls -al /a /b/a /b/b
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_6 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_6
new file mode 100644
index 000000000000..0800646de6a0
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_6
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN mkdir -p /a/b && touch /a/b/1 /a/b/2
+FROM busybox
+COPY --from=base /a/b/. /b/
+RUN ls -al /b/1 /b/2 /b && ! ls -al /a /b/a /b/b
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_7 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_7
new file mode 100644
index 000000000000..6ee21bc2908e
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_7
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN touch /b
+FROM busybox
+COPY --from=base /b /a
+RUN ls -al /a && ! ls -al /b
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_8 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_8
new file mode 100644
index 000000000000..47c7059c561d
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_8
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN mkdir -p /a && touch /a/b
+FROM busybox
+COPY --from=base /a/b /a
+RUN ls -al /a && ! ls -al /b
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_9 b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_9
new file mode 100644
index 000000000000..a8a83ce46b86
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/Dockerfile.copyfrom_9
@@ -0,0 +1,5 @@
+FROM busybox as base
+RUN mkdir -p /a && touch /a/1
+FROM busybox
+COPY --from=base /a/1 /a/b/c/
+RUN ls -al /a/b/c/1 && ! ls -al /a/b/1
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/copyfrom/Dockerfile b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/copyfrom/Dockerfile
new file mode 100644
index 000000000000..a0cf363b1744
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/copyfrom/Dockerfile
@@ -0,0 +1,4 @@
+FROM centos:7 as base
+RUN mkdir -p /a/blah && touch /a/blah/1 /a/blah/2
+FROM centos:7
+COPY --from=base /a/blah/* /blah/
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/dir/Dockerfile b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/dir/Dockerfile
index 6e8b8f7f6f29..4164fec4a7ef 100644
--- a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/dir/Dockerfile
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/dir/Dockerfile
@@ -1,4 +1,4 @@
FROM busybox
COPY . /
COPY . dir
-COPY subdir/ test/
\ No newline at end of file
+COPY subdir/ test/
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/overlapdir/Dockerfile.with_slash b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/overlapdir/Dockerfile.with_slash
new file mode 100644
index 000000000000..81988dba2d07
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/overlapdir/Dockerfile.with_slash
@@ -0,0 +1,2 @@
+FROM busybox
+COPY existing/ .
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/overlapdir/Dockerfile.without_slash b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/overlapdir/Dockerfile.without_slash
new file mode 100644
index 000000000000..0833b157ae26
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/overlapdir/Dockerfile.without_slash
@@ -0,0 +1,2 @@
+FROM busybox
+COPY existing .
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/overlapdir/existing/etc/file-in-existing-dir b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/overlapdir/existing/etc/file-in-existing-dir
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/singlefile/Dockerfile b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/singlefile/Dockerfile
new file mode 100644
index 000000000000..5bede2701014
--- /dev/null
+++ b/vendor/github.com/openshift/imagebuilder/dockerclient/testdata/singlefile/Dockerfile
@@ -0,0 +1,2 @@
+FROM busybox
+COPY . copy
\ No newline at end of file
diff --git a/vendor/github.com/openshift/imagebuilder/internals.go b/vendor/github.com/openshift/imagebuilder/internals.go
index 9a8005bfc276..3c60c7983bd6 100644
--- a/vendor/github.com/openshift/imagebuilder/internals.go
+++ b/vendor/github.com/openshift/imagebuilder/internals.go
@@ -46,12 +46,23 @@ func handleJSONArgs(args []string, attributes map[string]bool) []string {
return []string{strings.Join(args, " ")}
}
+func hasSlash(input string) bool {
+ return strings.HasSuffix(input, string(os.PathSeparator)) || strings.HasSuffix(input, string(os.PathSeparator)+".")
+}
+
// makeAbsolute ensures that the provided path is absolute.
func makeAbsolute(dest, workingDir string) string {
// Twiddle the destination when its a relative path - meaning, make it
// relative to the WORKINGDIR
+ if dest == "." {
+ if !hasSlash(workingDir) {
+ workingDir += string(os.PathSeparator)
+ }
+ dest = workingDir
+ }
+
if !filepath.IsAbs(dest) {
- hasSlash := strings.HasSuffix(dest, string(os.PathSeparator)) || strings.HasSuffix(dest, string(os.PathSeparator)+".")
+ hasSlash := hasSlash(dest)
dest = filepath.Join(string(os.PathSeparator), filepath.FromSlash(workingDir), dest)
// Make sure we preserve any trailing slash
diff --git a/vendor/k8s.io/kubernetes/pkg/cloudprovider/providers/aws/aws.go b/vendor/k8s.io/kubernetes/pkg/cloudprovider/providers/aws/aws.go
index 1c006ebce1a8..06269a184150 100644
--- a/vendor/k8s.io/kubernetes/pkg/cloudprovider/providers/aws/aws.go
+++ b/vendor/k8s.io/kubernetes/pkg/cloudprovider/providers/aws/aws.go
@@ -221,6 +221,13 @@ const (
createTagFactor = 2.0
createTagSteps = 9
+ // encryptedCheck* constants configure the poll that checks whether a
+ // created volume has been silently removed by AWS.
+ // On a random AWS account (shared among several developers) it took 4s on
+ // average.
+ encryptedCheckInterval = 1 * time.Second
+ encryptedCheckTimeout = 30 * time.Second
+
// Number of node names that can be added to a filter. The AWS limit is 200
// but we are using a lower limit on purpose
filterNodeLimit = 150
@@ -2202,14 +2209,6 @@ func (c *Cloud) CreateDisk(volumeOptions *VolumeOptions) (KubernetesVolumeID, er
request.VolumeType = aws.String(createType)
request.Encrypted = aws.Bool(volumeOptions.Encrypted)
if len(volumeOptions.KmsKeyId) > 0 {
- if missing, err := c.checkEncryptionKey(volumeOptions.KmsKeyId); err != nil {
- if missing {
- // KSM key is missing, provisioning would fail
- return "", err
- }
- // Log checkEncryptionKey error and try provisioning anyway.
- glog.Warningf("Cannot check KSM key %s: %v", volumeOptions.KmsKeyId, err)
- }
request.KmsKeyId = aws.String(volumeOptions.KmsKeyId)
request.Encrypted = aws.Bool(true)
}
@@ -2238,24 +2237,50 @@ func (c *Cloud) CreateDisk(volumeOptions *VolumeOptions) (KubernetesVolumeID, er
return "", fmt.Errorf("error tagging volume %s: %q", volumeName, err)
}
+ // AWS has a bad habit of reporting success when creating a volume with
+ // encryption keys that either don't exist or have wrong permissions.
+ // Such a volume lives for a couple of seconds and then is silently deleted
+ // by AWS. There is no other check to ensure that given KMS key is correct,
+ // because Kubernetes may have limited permissions to the key.
+ if len(volumeOptions.KmsKeyId) > 0 {
+ err := c.waitUntilVolumeAvailable(volumeName)
+ if err != nil {
+ if isAWSErrorVolumeNotFound(err) {
+ err = fmt.Errorf("failed to create encrypted volume: the volume disappeared after creation, most likely due to inaccessible KMS encryption key")
+ }
+ return "", err
+ }
+ }
+
return volumeName, nil
}
-// checkEncryptionKey tests that given encryption key exists.
-func (c *Cloud) checkEncryptionKey(keyId string) (missing bool, err error) {
- input := &kms.DescribeKeyInput{
- KeyId: aws.String(keyId),
- }
- _, err = c.kms.DescribeKey(input)
- if err == nil {
- return false, nil
+func (c *Cloud) waitUntilVolumeAvailable(volumeName KubernetesVolumeID) error {
+ disk, err := newAWSDisk(c, volumeName)
+ if err != nil {
+ // Unreachable code
+ return err
}
- if awsError, ok := err.(awserr.Error); ok {
- if awsError.Code() == "NotFoundException" {
- return true, fmt.Errorf("KMS key %s not found: %q", keyId, err)
+
+ err = wait.Poll(encryptedCheckInterval, encryptedCheckTimeout, func() (done bool, err error) {
+ vol, err := disk.describeVolume()
+ if err != nil {
+ return true, err
}
- }
- return false, fmt.Errorf("Error checking KSM key %s: %q", keyId, err)
+ if vol.State != nil {
+ switch *vol.State {
+ case "available":
+ // The volume is Available, it won't be deleted now.
+ return true, nil
+ case "creating":
+ return false, nil
+ default:
+ return true, fmt.Errorf("unexpected State of newly created AWS EBS volume %s: %q", volumeName, *vol.State)
+ }
+ }
+ return false, nil
+ })
+ return err
}
// DeleteDisk implements Volumes.DeleteDisk
diff --git a/vendor/k8s.io/kubernetes/pkg/controller/nodelifecycle/scheduler/taint_manager.go b/vendor/k8s.io/kubernetes/pkg/controller/nodelifecycle/scheduler/taint_manager.go
index a71fa8fc788c..2add31dec757 100644
--- a/vendor/k8s.io/kubernetes/pkg/controller/nodelifecycle/scheduler/taint_manager.go
+++ b/vendor/k8s.io/kubernetes/pkg/controller/nodelifecycle/scheduler/taint_manager.go
@@ -18,6 +18,9 @@ package scheduler
import (
"fmt"
+ "sync"
+ "time"
+
"k8s.io/api/core/v1"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/kubernetes/pkg/apis/core/helper"
@@ -27,8 +30,6 @@ import (
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/types"
- "sync"
- "time"
"k8s.io/client-go/kubernetes/scheme"
v1core "k8s.io/client-go/kubernetes/typed/core/v1"
@@ -189,9 +190,11 @@ func (tc *NoExecuteTaintManager) Run(stopCh <-chan struct{}) {
nodeUpdate := item.(*nodeUpdateItem)
select {
case <-stopCh:
+ tc.nodeUpdateQueue.Done(item)
break
case tc.nodeUpdateChannel <- nodeUpdate:
}
+ tc.nodeUpdateQueue.Done(item)
}
}(stopCh)
@@ -204,9 +207,11 @@ func (tc *NoExecuteTaintManager) Run(stopCh <-chan struct{}) {
podUpdate := item.(*podUpdateItem)
select {
case <-stopCh:
+ tc.podUpdateQueue.Done(item)
break
case tc.podUpdateChannel <- podUpdate:
}
+ tc.podUpdateQueue.Done(item)
}
}(stopCh)
diff --git a/vendor/k8s.io/kubernetes/test/e2e/storage/volume_provisioning.go b/vendor/k8s.io/kubernetes/test/e2e/storage/volume_provisioning.go
index 493902734bb3..73b9a4ad41c4 100644
--- a/vendor/k8s.io/kubernetes/test/e2e/storage/volume_provisioning.go
+++ b/vendor/k8s.io/kubernetes/test/e2e/storage/volume_provisioning.go
@@ -823,6 +823,53 @@ var _ = utils.SIGDescribe("Dynamic Provisioning", func() {
testDynamicProvisioning(test, c, claim, nil)
})
})
+ Describe("Invalid AWS KMS key", func() {
+ It("should report an error and create no PV", func() {
+ framework.SkipUnlessProviderIs("aws")
+ test := storageClassTest{
+ name: "AWS EBS with invalid KMS key",
+ provisioner: "kubernetes.io/aws-ebs",
+ claimSize: "2Gi",
+ parameters: map[string]string{"kmsKeyId": "arn:aws:kms:us-east-1:123456789012:key/55555555-5555-5555-5555-555555555555"},
+ }
+
+ By("creating a StorageClass")
+ suffix := fmt.Sprintf("invalid-aws")
+ class := newStorageClass(test, ns, suffix)
+ class, err := c.StorageV1().StorageClasses().Create(class)
+ Expect(err).NotTo(HaveOccurred())
+ defer func() {
+ framework.Logf("deleting storage class %s", class.Name)
+ framework.ExpectNoError(c.StorageV1().StorageClasses().Delete(class.Name, nil))
+ }()
+
+ By("creating a claim object with a suffix for gluster dynamic provisioner")
+ claim := newClaim(test, ns, suffix)
+ claim.Spec.StorageClassName = &class.Name
+ claim, err = c.CoreV1().PersistentVolumeClaims(claim.Namespace).Create(claim)
+ Expect(err).NotTo(HaveOccurred())
+ defer func() {
+ framework.Logf("deleting claim %q/%q", claim.Namespace, claim.Name)
+ err = c.CoreV1().PersistentVolumeClaims(claim.Namespace).Delete(claim.Name, nil)
+ if err != nil && !apierrs.IsNotFound(err) {
+ framework.Failf("Error deleting claim %q. Error: %v", claim.Name, err)
+ }
+ }()
+
+ // Watch events until the message about invalid key appears
+ err = wait.Poll(time.Second, framework.ClaimProvisionTimeout, func() (bool, error) {
+ events, err := c.CoreV1().Events(claim.Namespace).List(metav1.ListOptions{})
+ Expect(err).NotTo(HaveOccurred())
+ for _, event := range events.Items {
+ if strings.Contains(event.Message, "failed to create encrypted volume: the volume disappeared after creation, most likely due to inaccessible KMS encryption key") {
+ return true, nil
+ }
+ }
+ return false, nil
+ })
+ Expect(err).NotTo(HaveOccurred())
+ })
+ })
})
func getDefaultStorageClassName(c clientset.Interface) string {