initial rewrite

Signed-off-by: Vasiliy Tolstov <v.tolstov@unistack.org>
This commit is contained in:
2019-04-18 23:13:04 +03:00
commit 24f641df0d
37 changed files with 4349 additions and 0 deletions

208
crushmap/binary.go Normal file
View File

@@ -0,0 +1,208 @@
package crushmap
import (
"bytes"
"encoding/binary"
"fmt"
"io"
)
const (
	// Magic is the marker expected at the very start of a binary crush map.
	Magic = uint32(0x00010000)
)
// CrushRuleOpType identifies a crush rule step operation in the binary format.
type CrushRuleOpType uint32

// Rule step opcodes. The iota order defines the numeric wire values, so new
// entries must only be appended, never inserted.
const (
	CrushRuleNoop CrushRuleOpType = iota
	CrushRuleTake
	CrushRuleChooseFirstN
	CrushRuleChooseIndep
	CrushRuleEmit
	CrushRuleChooseleafFirstN
	CrushRuleChooseleafIndep
	CrushRuleSetChooseTries
	CrushRuleSetChooseleafTries
	CrushRuleSetChooseLocalTries
	CrushRuleSetChooseLocalFallbackTries
	CrushRuleSetChooseleafVaryR
	CrushRuleSetChooseleafStable
)
var (
	// crushRuleOpTypeStringMap renders opcodes as text (used by String()).
	crushRuleOpTypeStringMap = map[CrushRuleOpType]string{
		CrushRuleNoop:                        "noop",
		CrushRuleTake:                        "take",
		CrushRuleChooseFirstN:                "choose firstn",
		CrushRuleChooseIndep:                 "choose indep",
		CrushRuleEmit:                        "emit",
		CrushRuleChooseleafFirstN:            "choose_leaf firstn",
		CrushRuleChooseleafIndep:             "choose_leaf indep",
		CrushRuleSetChooseTries:              "set_choose_tries",
		CrushRuleSetChooseleafTries:          "set_chooseleaf_tries",
		CrushRuleSetChooseLocalTries:         "set_choose_local_tries",
		CrushRuleSetChooseLocalFallbackTries: "set_choose_local_fallback_tries",
		CrushRuleSetChooseleafVaryR:          "set_choose_leaf_vary_r",
		CrushRuleSetChooseleafStable:         "set_choose_leaf_stable",
	}
	// crushRuleOpStringTypeMap parses textual op names back to opcodes.
	// NOTE(review): the two maps are NOT exact inverses — the "set_*" ops
	// render with an underscore ("set_choose_tries") but parse with a space
	// ("set choose_tries"). Round-tripping an op through String() and this
	// map therefore fails for those entries; confirm which spelling each
	// consumer expects before unifying.
	crushRuleOpStringTypeMap = map[string]CrushRuleOpType{
		"noop":                            CrushRuleNoop,
		"take":                            CrushRuleTake,
		"choose firstn":                   CrushRuleChooseFirstN,
		"choose indep":                    CrushRuleChooseIndep,
		"emit":                            CrushRuleEmit,
		"choose_leaf firstn":              CrushRuleChooseleafFirstN,
		"choose_leaf indep":               CrushRuleChooseleafIndep,
		"set choose_tries":                CrushRuleSetChooseTries,
		"set chooseleaf_tries":            CrushRuleSetChooseleafTries,
		"set choose_local_tries":          CrushRuleSetChooseLocalTries,
		"set choose_local_fallback_tries": CrushRuleSetChooseLocalFallbackTries,
		"set choose_leaf_vary_r":          CrushRuleSetChooseleafVaryR,
		"set choose_leaf_stable":          CrushRuleSetChooseleafStable,
	}
)
// String returns the textual name of the opcode, or "invalid" for values
// outside the known set.
func (t CrushRuleOpType) String() string {
	if name, ok := crushRuleOpTypeStringMap[t]; ok {
		return name
	}
	return "invalid"
}
// CrushRuleStep mirrors the fixed-size binary layout of one rule step:
// an opcode followed by two opcode-dependent arguments.
type CrushRuleStep struct {
	Op   CrushRuleOpType
	Arg1 int32
	Arg2 int32
}

// binaryParser bundles the reader used when decoding a binary crush map and
// the writer used when encoding one.
type binaryParser struct {
	r io.Reader
	w io.Writer
}
// DecodeBinary parses a binary crush map (little-endian, prefixed by Magic)
// into cmap: bucket records, rule records, three (id, name) tables (type
// names, bucket/device names, rule names), and the trailing tunables.
// Rules and buckets are sorted before returning.
func (cmap *Map) DecodeBinary(data []byte) error {
	p := &binaryParser{r: bytes.NewBuffer(data)}

	var magic uint32
	if err := binary.Read(p.r, binary.LittleEndian, &magic); err != nil {
		return err
	}
	if magic != Magic {
		return fmt.Errorf("invalid magic: %0x != %0x", magic, Magic)
	}

	var (
		maxBuckets int32
		maxRules   uint32
		maxDevices int32 // read to advance the stream; not used below
	)
	if err := binary.Read(p.r, binary.LittleEndian, &maxBuckets); err != nil {
		return err
	}
	if err := binary.Read(p.r, binary.LittleEndian, &maxRules); err != nil {
		return err
	}
	if err := binary.Read(p.r, binary.LittleEndian, &maxDevices); err != nil {
		return err
	}

	for i := int32(0); i < maxBuckets; i++ {
		ibucket, err := p.handleBucket()
		if err != nil {
			return err
		}
		if ibucket == nil {
			// Empty slot in the bucket table.
			continue
		}
		cmap.Buckets = append(cmap.Buckets, ibucket)
	}
	for i := uint32(0); i < maxRules; i++ {
		irule, err := p.handleRule()
		if err != nil {
			return err
		}
		if irule == nil {
			// BUG FIX: handleRule returns (nil, nil) for an absent rule
			// slot; appending it would panic below when resolving names.
			continue
		}
		cmap.Rules = append(cmap.Rules, irule)
	}

	// First table: bucket type names.
	itypes, err := p.handleType()
	if err != nil {
		return err
	}
	cmap.Types = itypes
	btypes := make(map[int32]string, len(itypes))
	for _, t := range itypes {
		btypes[t.ID] = t.Name
	}

	// Second table: bucket/device names.
	bnames := make(map[int32]string)
	itypes, err = p.handleType()
	if err != nil {
		return err
	}
	for _, t := range itypes {
		bnames[t.ID] = t.Name
	}

	// Third table: rule names, keyed by ruleset id.
	rnames := make(map[int32]string)
	itypes, err = p.handleType()
	if err != nil {
		return err
	}
	for _, t := range itypes {
		rnames[t.ID] = t.Name
	}

	var ok bool
	for _, bucket := range cmap.Buckets {
		if bucket == nil {
			continue
		}
		if bucket.TypeName, ok = btypes[int32(bucket.TypeID)]; !ok {
			return fmt.Errorf("unknown type id: %d", bucket.TypeID)
		}
		for _, item := range bucket.Items {
			// BUG FIX: look up the item's own id; the original used the
			// enclosing bucket's id, naming every item after its bucket.
			if item.Name, ok = bnames[item.ID]; !ok {
				return fmt.Errorf("unknown item id: %d", item.ID)
			}
		}
	}

	// Fourth table is consumed only to keep the stream position correct;
	// its contents are not used here.
	if _, err = p.handleType(); err != nil {
		return err
	}

	for _, rule := range cmap.Rules {
		if rule.Name, ok = rnames[int32(rule.Ruleset)]; !ok {
			return fmt.Errorf("unknown rule id: %d", rule.ID)
		}
		for _, step := range rule.Steps {
			// Only "choose" steps carry a bucket-type argument to resolve.
			switch step.Op {
			case CrushRuleChooseFirstN.String(), CrushRuleChooseIndep.String(), CrushRuleChooseleafFirstN.String(), CrushRuleChooseleafIndep.String():
				if step.ItemType, ok = btypes[step.ItemTypeID]; !ok {
					return fmt.Errorf("unknown type id: %d", step.ItemTypeID)
				}
			}
		}
	}

	itunables, err := p.handleTunable()
	if err != nil {
		return err
	}
	cmap.Tunables = itunables

	cmap.rulesSort()
	cmap.bucketsSort()
	return nil
}

302
crushmap/binary_bucket.go Normal file
View File

@@ -0,0 +1,302 @@
package crushmap
import (
"encoding/binary"
"fmt"
"math"
)
// binaryBucket is the read-only view shared by every per-algorithm bucket
// decoding (uniform, list, tree, straw, straw2).
type binaryBucket interface {
	BucketID() int32
	BucketType() CrushBucketType
	BucketAlg() CrushAlgType
	BucketHash() CrushBucketHashType
	BucketWeight() float32
	BucketSize() uint32
}
// BucketUniform accessors implementing binaryBucket; all delegate to the
// embedded binaryBucketHeader fields.
func (b *BucketUniform) BucketID() int32 {
	return b.ID
}
func (b *BucketUniform) BucketType() CrushBucketType {
	return b.Type
}
func (b *BucketUniform) BucketAlg() CrushAlgType {
	return b.Alg
}
func (b *BucketUniform) BucketHash() CrushBucketHashType {
	return b.Hash
}
func (b *BucketUniform) BucketWeight() float32 {
	return b.Weight
}
func (b *BucketUniform) BucketSize() uint32 {
	return b.Size
}

// BucketList accessors implementing binaryBucket.
func (b *BucketList) BucketID() int32 {
	return b.ID
}
func (b *BucketList) BucketType() CrushBucketType {
	return b.Type
}
func (b *BucketList) BucketAlg() CrushAlgType {
	return b.Alg
}
func (b *BucketList) BucketHash() CrushBucketHashType {
	return b.Hash
}
func (b *BucketList) BucketWeight() float32 {
	return b.Weight
}
func (b *BucketList) BucketSize() uint32 {
	return b.Size
}

// BucketTree accessors implementing binaryBucket.
func (b *BucketTree) BucketID() int32 {
	return b.ID
}
func (b *BucketTree) BucketType() CrushBucketType {
	return b.Type
}
func (b *BucketTree) BucketAlg() CrushAlgType {
	return b.Alg
}
func (b *BucketTree) BucketHash() CrushBucketHashType {
	return b.Hash
}
func (b *BucketTree) BucketWeight() float32 {
	return b.Weight
}
func (b *BucketTree) BucketSize() uint32 {
	return b.Size
}

// BucketStraw accessors implementing binaryBucket.
func (b *BucketStraw) BucketID() int32 {
	return b.ID
}
func (b *BucketStraw) BucketType() CrushBucketType {
	return b.Type
}
func (b *BucketStraw) BucketAlg() CrushAlgType {
	return b.Alg
}
func (b *BucketStraw) BucketHash() CrushBucketHashType {
	return b.Hash
}
func (b *BucketStraw) BucketWeight() float32 {
	return b.Weight
}
func (b *BucketStraw) BucketSize() uint32 {
	return b.Size
}

// BucketStraw2 accessors implementing binaryBucket.
func (b *BucketStraw2) BucketID() int32 {
	return b.ID
}
func (b *BucketStraw2) BucketType() CrushBucketType {
	return b.Type
}
func (b *BucketStraw2) BucketAlg() CrushAlgType {
	return b.Alg
}
func (b *BucketStraw2) BucketHash() CrushBucketHashType {
	return b.Hash
}
func (b *BucketStraw2) BucketWeight() float32 {
	return b.Weight
}
func (b *BucketStraw2) BucketSize() uint32 {
	return b.Size
}
// binaryBucketHeader is the fixed leading part of every bucket record.
type binaryBucketHeader struct {
	ID     int32
	Type   CrushBucketType
	Alg    CrushAlgType
	Hash   CrushBucketHashType
	Weight float32
	Size   uint32
}

// binaryBucketCommon adds the item-id table (Size entries) shared by all
// bucket algorithms.
type binaryBucketCommon struct {
	binaryBucketHeader
	Items []int32
}

// BucketUniform: every item carries the same weight.
type BucketUniform struct {
	binaryBucketCommon
	ItemWeight float32
}

// BucketList: per-item weights plus running sums.
type BucketList struct {
	binaryBucketCommon
	ItemWeights []float32
	SumWeights  []float32
}

// BucketTree: weights attached to the nodes of a binary tree.
type BucketTree struct {
	binaryBucketCommon
	NumNodes    uint8
	NodeWeights []float32
}

// BucketStraw: per-item weights and precomputed straw values.
type BucketStraw struct {
	binaryBucketCommon
	ItemWeights []float32
	Straws      []uint32
}

// BucketStraw2: per-item weights only (straws computed on the fly).
type BucketStraw2 struct {
	binaryBucketCommon
	ItemWeights []float32
}
// String renders the common bucket header fields for debugging output.
func (b *binaryBucketHeader) String() string {
	return fmt.Sprintf(
		"id: %d, type: %s, alg: %s, hash: %s, weight: %f, size: %d",
		b.ID, b.Type, b.Alg, b.Hash, b.Weight, b.Size,
	)
}
// handleBucket decodes one bucket record from the binary stream.
//
// It returns (nil, nil) for an empty bucket slot (leading alg word is
// CrushAlgInvalid); callers must skip such slots. The alg-specific weight
// tables are decoded mainly to keep the stream position correct — only the
// common header and the item ids are copied into the returned *Bucket.
func (p *binaryParser) handleBucket() (*Bucket, error) {
	var alg uint32
	if err := binary.Read(p.r, binary.LittleEndian, &alg); err != nil {
		return nil, err
	}
	if CrushAlgType(alg) == CrushAlgInvalid {
		// Empty slot in the bucket table.
		return nil, nil
	}

	var hdr binaryBucketHeader
	if err := binary.Read(p.r, binary.LittleEndian, &hdr.ID); err != nil {
		return nil, err
	}
	if err := binary.Read(p.r, binary.LittleEndian, &hdr.Type); err != nil {
		return nil, err
	}
	if err := binary.Read(p.r, binary.LittleEndian, &hdr.Alg); err != nil {
		return nil, err
	}
	if err := binary.Read(p.r, binary.LittleEndian, &hdr.Hash); err != nil {
		return nil, err
	}
	// Weight is stored as the raw bits of a float32.
	var weight uint32
	if err := binary.Read(p.r, binary.LittleEndian, &weight); err != nil {
		return nil, err
	}
	hdr.Weight = math.Float32frombits(weight)
	if err := binary.Read(p.r, binary.LittleEndian, &hdr.Size); err != nil {
		return nil, err
	}

	bucketCommon := binaryBucketCommon{binaryBucketHeader: hdr}
	bucketCommon.Items = make([]int32, hdr.Size)
	for i := uint32(0); i < hdr.Size; i++ {
		if err := binary.Read(p.r, binary.LittleEndian, &bucketCommon.Items[i]); err != nil {
			return nil, err
		}
	}

	var bucket binaryBucket
	switch hdr.Alg {
	case CrushAlgUniform:
		bucketUniform := &BucketUniform{binaryBucketCommon: bucketCommon}
		var itemWeight uint32
		if err := binary.Read(p.r, binary.LittleEndian, &itemWeight); err != nil {
			return nil, err
		}
		bucketUniform.ItemWeight = math.Float32frombits(itemWeight)
		bucket = bucketUniform
	case CrushAlgList:
		bucketList := &BucketList{
			binaryBucketCommon: bucketCommon,
			ItemWeights:        make([]float32, hdr.Size),
			SumWeights:         make([]float32, hdr.Size),
		}
		// BUG FIX: the loop previously ran `i <= hdr.Size`, indexing one
		// past the end of both weight slices (runtime panic).
		for i := uint32(0); i < hdr.Size; i++ {
			var itemWeight, sumWeight uint32
			if err := binary.Read(p.r, binary.LittleEndian, &itemWeight); err != nil {
				return nil, err
			}
			bucketList.ItemWeights[i] = math.Float32frombits(itemWeight)
			if err := binary.Read(p.r, binary.LittleEndian, &sumWeight); err != nil {
				return nil, err
			}
			bucketList.SumWeights[i] = math.Float32frombits(sumWeight)
		}
		bucket = bucketList
	case CrushAlgTree:
		bucketTree := &BucketTree{binaryBucketCommon: bucketCommon}
		if err := binary.Read(p.r, binary.LittleEndian, &bucketTree.NumNodes); err != nil {
			return nil, err
		}
		// BUG FIX: NumNodes is uint8, so `NumNodes*4` wrapped at 256;
		// widen before multiplying.
		n := int(bucketTree.NumNodes) * 4
		nodeWeights := make([]uint32, n)
		bucketTree.NodeWeights = make([]float32, n)
		if err := binary.Read(p.r, binary.LittleEndian, &nodeWeights); err != nil {
			return nil, err
		}
		for i := 0; i < n; i++ {
			bucketTree.NodeWeights[i] = math.Float32frombits(nodeWeights[i])
		}
		bucket = bucketTree
	case CrushAlgStraw:
		// NOTE(review): the slices are sized Size*4 but only Size entries
		// are read — the *4 looks like a byte-count/element-count mixup;
		// confirm against the on-disk format before tightening.
		itemWeights := make([]uint32, hdr.Size*4)
		bucketStraw := &BucketStraw{
			binaryBucketCommon: bucketCommon,
			Straws:             make([]uint32, hdr.Size*4),
			ItemWeights:        make([]float32, hdr.Size*4),
		}
		for i := uint32(0); i < hdr.Size; i++ {
			if err := binary.Read(p.r, binary.LittleEndian, &itemWeights[i]); err != nil {
				return nil, err
			}
			bucketStraw.ItemWeights[i] = math.Float32frombits(itemWeights[i])
			if err := binary.Read(p.r, binary.LittleEndian, &bucketStraw.Straws[i]); err != nil {
				return nil, err
			}
		}
		bucket = bucketStraw
	case CrushAlgStraw2:
		// NOTE(review): reads (Size+1)*4 words — same suspected *4 mixup
		// as the straw case; confirm against the on-disk format.
		itemWeights := make([]uint32, (hdr.Size+1)*4)
		bucketStraw2 := &BucketStraw2{
			binaryBucketCommon: bucketCommon,
			ItemWeights:        make([]float32, (hdr.Size+1)*4),
		}
		if err := binary.Read(p.r, binary.LittleEndian, &itemWeights); err != nil {
			return nil, err
		}
		for i := uint32(0); i < (hdr.Size+1)*4; i++ {
			bucketStraw2.ItemWeights[i] = math.Float32frombits(itemWeights[i])
		}
		bucket = bucketStraw2
	}

	ibucket := &Bucket{
		ID:     hdr.ID,
		Alg:    hdr.Alg.String(),
		Hash:   hdr.Hash.String(),
		TypeID: hdr.Type,
		Weight: hdr.Weight,
		Size:   hdr.Size,
	}
	// BUG FIX: propagate the decoded item ids and positions; previously
	// Items was left empty, so callers could never resolve item names.
	for pos, id := range bucketCommon.Items {
		ibucket.Items = append(ibucket.Items, &Item{ID: id, Pos: pos})
	}
	_ = bucket // alg-specific weight tables are decoded but not yet exposed
	return ibucket, nil
}

85
crushmap/binary_rule.go Normal file
View File

@@ -0,0 +1,85 @@
package crushmap
import (
"encoding/binary"
)
// binaryRuleStep is the fixed-size wire layout of one rule step: an opcode
// and two opcode-dependent arguments.
type binaryRuleStep struct {
	Op   CrushRuleOpType
	Arg1 int32
	Arg2 int32
}

// binaryRuleMask is the rule header: ruleset id, rule type, and the
// replica-count range the rule applies to.
type binaryRuleMask struct {
	Ruleset uint8
	Type    uint8
	MinSize uint8
	MaxSize uint8
}

// binaryRule is one decoded rule record: step count, header mask, and steps.
type binaryRule struct {
	Len   uint32
	Mask  binaryRuleMask
	Steps []binaryRuleStep
}
// handleRule decodes one rule record from the binary stream.
//
// The leading uint32 flags whether the slot is occupied; an empty slot
// yields (nil, nil), which callers must skip before dereferencing.
func (p *binaryParser) handleRule() (*Rule, error) {
	var err error
	irule := &Rule{}
	var yes uint32
	err = binary.Read(p.r, binary.LittleEndian, &yes)
	if err != nil {
		return nil, err
	}
	if yes == 0 {
		// Empty rule slot.
		return nil, nil
	}
	var rule binaryRule
	err = binary.Read(p.r, binary.LittleEndian, &rule.Len)
	if err != nil {
		return nil, err
	}
	err = binary.Read(p.r, binary.LittleEndian, &rule.Mask)
	if err != nil {
		return nil, err
	}
	// rule.Steps = make([]RuleStep, rule.Len)
	for i := uint32(0); i < rule.Len; i++ {
		var step binaryRuleStep
		istep := &Step{}
		err = binary.Read(p.r, binary.LittleEndian, &step)
		if err != nil {
			return nil, err
		}
		istep.Op = step.Op.String()
		// The meaning of Arg1/Arg2 depends on the opcode.
		switch step.Op {
		case CrushRuleChooseFirstN, CrushRuleChooseIndep, CrushRuleChooseleafFirstN, CrushRuleChooseleafIndep:
			istep.Num = step.Arg1
			istep.ItemTypeID = step.Arg2 //TYPE!!!
		case CrushRuleTake:
			// NOTE(review): for "take", Arg1 is the item (bucket) id, yet it
			// is stored in ItemTypeID — confirm downstream expectations.
			istep.ItemTypeID = step.Arg1
			// case CrushRuleEmit:
			// default:
			//	panic(step.Op.String())
		}
		/*
			Op string `json:"op"`
			Item int `json:"item,omitempty"`
			ItemName string `json:"item_name,omitempty"`
			ItemClass string `json:"item_class,omitempty"`
			Num int `json:"num,omitempty"`
			ItemType string `json:"type,omitempty"`
		*/
		irule.Steps = append(irule.Steps, istep)
	}
	// NOTE(review): ID is populated from the ruleset field; confirm that the
	// binary format really has no independent rule id.
	irule.ID = rule.Mask.Ruleset
	irule.Ruleset = rule.Mask.Ruleset
	irule.MinSize = rule.Mask.MinSize
	irule.MaxSize = rule.Mask.MaxSize
	irule.Type = rule.Mask.Type
	return irule, nil
}

20
crushmap/binary_test.go Normal file
View File

@@ -0,0 +1,20 @@
package crushmap
import (
"io/ioutil"
"testing"
)
// TestBinary decodes the checked-in binary crush map fixture and fails on
// any read or parse error (smoke test; the result is not inspected).
// NOTE(review): io/ioutil is deprecated since Go 1.16 — os.ReadFile is the
// drop-in replacement.
func TestBinary(t *testing.T) {
	buf, err := ioutil.ReadFile("testdata/map.bin")
	if err != nil {
		t.Fatal(err)
	}
	m := NewMap()
	err = m.DecodeBinary(buf)
	if err != nil {
		t.Fatal(err)
	}
	_ = m
}

103
crushmap/binary_tunable.go Normal file
View File

@@ -0,0 +1,103 @@
package crushmap
import (
"encoding/binary"
"io"
)
// tunables holds the decoded tunables section of a binary crush map.
// Fields are grouped by the format revision that introduced them; newer
// groups may be absent from old maps (the decoder stops at EOF).
type tunables struct {
	// Original tunables block.
	ChooseLocalTries         uint32 `json:"choose_local_tries,omitempty"`
	ChooseLocalFallbackTries uint32 `json:"choose_local_fallback_tries,omitempty"`
	ChooseTotalTries         uint32 `json:"choose_total_tries,omitempty"`
	// Added later; expected value 1 on modern maps.
	ChooseleafDescendOnce uint32 `json:"chooseleaf_descend_once,omitempty"`
	// Added later; expected value 1 on modern maps.
	ChooseleafVaryR uint8 `json:"chooseleaf_vary_r,omitempty"`
	// Added later; expected value 1 on modern maps.
	StrawCalcVersion uint8 `json:"straw_calc_version,omitempty"`
	// Bitmask of permitted bucket algorithms.
	AllowedBucketAlgs uint32 `json:"allowed_bucket_algs,omitempty"`
	// Added later; expected value 1 on modern maps.
	ChooseleafStable uint8 `json:"chooseleaf_stable,omitempty"`
}
// legacyTunable returns the default tunable values assumed for maps that
// predate the tunables section.
func legacyTunable() tunables {
	var t tunables
	t.ChooseLocalTries = 2
	t.ChooseLocalFallbackTries = 5
	t.ChooseTotalTries = 19
	t.ChooseleafDescendOnce = 0
	t.ChooseleafVaryR = 0
	t.ChooseleafStable = 0
	t.AllowedBucketAlgs = CrushLegacyAllowedBucketAlgs
	t.StrawCalcVersion = 0
	return t
}
// handleTunable decodes the trailing tunables section into a generic map.
//
// The first three fields are mandatory; every later field was added in a
// newer map revision, so hitting io.EOF there means "old map" and returns
// the values gathered so far instead of an error.
//
// NOTE(review): this reads choose_total_tries before
// choose_local_fallback_tries; confirm the field order against the encoder
// that produced the maps this must parse.
func (p *binaryParser) handleTunable() (map[string]interface{}, error) {
	itunables := make(map[string]interface{})
	tune := legacyTunable()

	// read wraps the repeated little-endian decode of a single field.
	read := func(v interface{}) error {
		return binary.Read(p.r, binary.LittleEndian, v)
	}

	// Mandatory (oldest) fields: any failure, including EOF, is an error.
	if err := read(&tune.ChooseLocalTries); err != nil {
		return nil, err
	}
	itunables["choose_local_tries"] = tune.ChooseLocalTries
	if err := read(&tune.ChooseTotalTries); err != nil {
		return nil, err
	}
	itunables["choose_total_tries"] = tune.ChooseTotalTries
	if err := read(&tune.ChooseLocalFallbackTries); err != nil {
		return nil, err
	}
	itunables["choose_local_fallback_tries"] = tune.ChooseLocalFallbackTries

	// Optional fields: EOF simply means the map predates the field.
	if err := read(&tune.ChooseleafDescendOnce); err == io.EOF {
		return itunables, nil
	} else if err != nil {
		return nil, err
	}
	itunables["chooseleaf_descend_once"] = tune.ChooseleafDescendOnce

	if err := read(&tune.ChooseleafVaryR); err == io.EOF {
		return itunables, nil
	} else if err != nil {
		return nil, err
	}
	itunables["chooseleaf_vary_r"] = tune.ChooseleafVaryR

	if err := read(&tune.StrawCalcVersion); err == io.EOF {
		return itunables, nil
	} else if err != nil {
		return nil, err
	}
	itunables["straw_calc_version"] = tune.StrawCalcVersion

	if err := read(&tune.AllowedBucketAlgs); err == io.EOF {
		return itunables, nil
	} else if err != nil {
		return nil, err
	}
	itunables["allowed_bucket_algs"] = tune.AllowedBucketAlgs

	if err := read(&tune.ChooseleafStable); err == io.EOF {
		return itunables, nil
	} else if err != nil {
		return nil, err
	}
	itunables["chooseleaf_stable"] = tune.ChooseleafStable

	return itunables, nil
}

45
crushmap/binary_type.go Normal file
View File

@@ -0,0 +1,45 @@
package crushmap
import (
"encoding/binary"
)
// handleType decodes one (id, name) table: a uint32 record count followed by
// records of an int32 key and a uint32-length-prefixed string.
func (p *binaryParser) handleType() ([]*Type, error) {
	var err error
	var n uint32
	var itypes []*Type
	err = binary.Read(p.r, binary.LittleEndian, &n)
	if err != nil {
		return nil, err
	}
	for i := n; i > 0; i-- {
		var key int32
		err = binary.Read(p.r, binary.LittleEndian, &key)
		if err != nil {
			return nil, err
		}
		var l uint32
		err = binary.Read(p.r, binary.LittleEndian, &l)
		if err != nil {
			return nil, err
		}
		if l == 0 {
			// NOTE(review): a zero length triggers a second length read —
			// presumably skipping an empty/placeholder word in some
			// encodings; confirm against the on-disk format. A record with
			// a genuinely empty name would be mis-parsed here.
			err = binary.Read(p.r, binary.LittleEndian, &l)
			if err != nil {
				return nil, err
			}
		}
		val := make([]byte, l)
		err = binary.Read(p.r, binary.LittleEndian, &val)
		if err != nil {
			return nil, err
		}
		itypes = append(itypes, &Type{ID: key, Name: string(val)})
	}
	return itypes, nil
}

8
crushmap/check.go Normal file
View File

@@ -0,0 +1,8 @@
package crushmap
/*
item 'slot1-3' in bucket 'cn01' has pos 3 >= size 3 (pos from 0 to len - 1 items)
in rule 'rule-data-ssd' item 'root' not defined (check items in take action step take root class ssd)
*/

241
crushmap/common.go Normal file
View File

@@ -0,0 +1,241 @@
package crushmap
import (
"fmt"
"sort"
"strconv"
)
const (
	// Pool/rule type values as they appear in a rule's "type" field.
	ReplicatedPG = 1
	ErasurePG    = 3
)

// CrushBucketType is the numeric bucket type id (resolved via Map.Types).
type CrushBucketType uint16

// CrushAlgType selects the bucket placement algorithm.
type CrushAlgType uint8

// CrushBucketHashType selects the hash function (0 = rjenkins1).
type CrushBucketHashType uint8

// Bucket algorithm ids; the iota order defines the numeric wire values.
const (
	CrushAlgInvalid CrushAlgType = iota
	CrushAlgUniform
	CrushAlgList
	CrushAlgTree
	CrushAlgStraw
	CrushAlgStraw2
)

const (
	// CrushLegacyAllowedBucketAlgs is the legacy default bitmask of
	// permitted bucket algorithms (uniform, list, straw).
	CrushLegacyAllowedBucketAlgs = (1 << CrushAlgUniform) | (1 << CrushAlgList) | (1 << CrushAlgStraw)
)

var (
	// crushAlgTypeStringMap renders algorithm ids as text.
	crushAlgTypeStringMap = map[CrushAlgType]string{
		CrushAlgUniform: "uniform",
		CrushAlgList:    "list",
		CrushAlgTree:    "tree",
		CrushAlgStraw:   "straw",
		CrushAlgStraw2:  "straw2",
	}
	// crushAlgStringTypeMap parses textual algorithm names back to ids.
	crushAlgStringTypeMap = map[string]CrushAlgType{
		"uniform": CrushAlgUniform,
		"list":    CrushAlgList,
		"tree":    CrushAlgTree,
		"straw":   CrushAlgStraw,
		"straw2":  CrushAlgStraw2,
	}
)
// String renders the numeric bucket type id as its decimal string.
func (t CrushBucketType) String() string {
	// BUG FIX: was fmt.Sprintf(strconv.Itoa(...)) — a non-constant format
	// string with no operands (flagged by go vet); any '%' in the value
	// would corrupt the output. strconv.Itoa is the direct conversion.
	return strconv.Itoa(int(t))
}
// String returns the hash function name: "rjenkins1" for 0, otherwise the
// decimal id.
func (t CrushBucketHashType) String() string {
	if t == 0 {
		return "rjenkins1"
	}
	// BUG FIX: was fmt.Sprintf(strconv.Itoa(...)) — a non-constant format
	// string with no operands (flagged by go vet).
	return strconv.Itoa(int(t))
}
// String returns the textual algorithm name, or "invalid" for values
// outside the known set.
func (t CrushAlgType) String() string {
	if name, ok := crushAlgTypeStringMap[t]; ok {
		return name
	}
	return "invalid"
}
// CrushAlgFromType maps an algorithm id to its textual name, returning an
// error for ids outside the known set.
func CrushAlgFromType(t CrushAlgType) (string, error) {
	if name, ok := crushAlgTypeStringMap[t]; ok {
		return name, nil
	}
	return "", fmt.Errorf("unknown crush bucket alg: %d", t)
}
// CrushAlgFromString maps a textual algorithm name to its id, returning
// CrushAlgInvalid and an error for unknown names.
func CrushAlgFromString(t string) (CrushAlgType, error) {
	if alg, ok := crushAlgStringTypeMap[t]; ok {
		return alg, nil
	}
	return CrushAlgInvalid, fmt.Errorf("unknown crush bucket algo: %s", t)
}
// Tunables is the typed (exported) counterpart of the tunables section.
// NOTE(review): it mirrors the unexported tunables struct in the binary
// decoder and currently appears unused by the decode path, which stores
// tunables in a map[string]interface{} instead.
type Tunables struct {
	// Original tunables block.
	ChooseLocalTries         uint32 `json:"choose_local_tries,omitempty"`
	ChooseLocalFallbackTries uint32 `json:"choose_local_fallback_tries,omitempty"`
	ChooseTotalTries         uint32 `json:"choose_total_tries,omitempty"`
	// Added later; expected value 1 on modern maps.
	ChooseleafDescendOnce uint32 `json:"chooseleaf_descend_once,omitempty"`
	// Added later; expected value 1 on modern maps.
	ChooseleafVaryR uint8 `json:"chooseleaf_vary_r,omitempty"`
	// Added later; expected value 1 on modern maps.
	StrawCalcVersion uint8 `json:"straw_calc_version,omitempty"`
	// Bitmask of permitted bucket algorithms.
	AllowedBucketAlgs uint32 `json:"allowed_bucket_algs,omitempty"`
	// Added later; expected value 1 on modern maps.
	ChooseleafStable uint8 `json:"chooseleaf_stable,omitempty"`
	// Additional tunable keys seen in Ceph's JSON dumps but not yet
	// represented as struct fields:
	/*
		"profile": "firefly",
		"optimal_tunables": 0,
		"legacy_tunables": 0,
		"minimum_required_version": "firefly",
		"require_feature_tunables": 1,
		"require_feature_tunables2": 1,
		"has_v2_rules": 1,
		"require_feature_tunables3": 1,
		"has_v3_rules": 0,
		"has_v4_buckets": 0,
		"require_feature_tunables5": 0,
		"has_v5_rules": 0
	*/
}
// Step is one action in a rule (take / choose / emit / set_*).
type Step struct {
	Op       string `json:"op"`
	Item     int    `json:"item,omitempty"`
	ItemName string `json:"item_name,omitempty"`
	ItemClass string `json:"item_class,omitempty"`
	Num      int32  `json:"num,omitempty"`
	ItemType string `json:"type,omitempty"`
	// ItemTypeID is the unresolved numeric type id from the binary format.
	ItemTypeID int32 `json:"-"`
}

// Rule is a placement rule: header fields plus an ordered step list.
type Rule struct {
	Name    string  `json:"rule_name"`
	ID      uint8   `json:"rule_id"`
	Ruleset uint8   `json:"ruleset,omitempty"`
	Type    uint8   `json:"type"`
	MinSize uint8   `json:"min_size,omitempty"`
	MaxSize uint8   `json:"max_size,omitempty"`
	Steps   []*Step `json:"steps"`
}

// Item is one member of a bucket.
type Item struct {
	ID int32 `json:"id"`
	// NOTE(review): the tag "-,omitempty" serializes this field under the
	// literal key "-"; if the intent was to exclude Name from JSON the tag
	// should be exactly "-", and if it was to include it, "name,omitempty".
	Name   string  `json:"-,omitempty"`
	Weight float32 `json:"weight"`
	Pos    int     `json:"pos"`
}

// Bucket is a node of the crush hierarchy with its resolved names.
type Bucket struct {
	Name     string          `json:"name"`
	TypeID   CrushBucketType `json:"type_id"`
	TypeName string          `json:"type_name"`
	Weight   float32         `json:"weight"`
	ID       int32           `json:"id"`
	IDClass  string          `json:"id_class,omitempty"`
	Alg      string          `json:"alg"`
	Hash     string          `json:"hash"`
	Size     uint32          `json:"-"`
	Items    []*Item         `json:"items"`
}

// Device is a leaf device (e.g. an OSD).
type Device struct {
	ID    int32  `json:"id"`
	Name  string `json:"name"`
	Class string `json:"class,omitempty"`
}

// Type is one (id, name) entry of the bucket-type table.
type Type struct {
	ID   int32  `json:"type_id"`
	Name string `json:"name"`
}

// ChooseArg carries per-bucket weight-set overrides.
type ChooseArg struct {
	BucketID  int32     `json:"bucket_id,omitempty"`
	WeightSet []float64 `json:"weight_set,omitempty"`
	IDs       []int     `json:"ids,omitempty"`
}

// Map is the full crush map: tunables, devices, types, buckets, and rules.
type Map struct {
	Tunables   map[string]interface{} `json:"tunables,omitempty"`
	Devices    []*Device              `json:"devices"`
	Types      []*Type                `json:"types"`
	Buckets    []*Bucket              `json:"buckets"`
	Rules      []*Rule                `json:"rules"`
	ChooseArgs map[string]ChooseArg   `json:"choose_args,omitempty"`
}

// CrushChild is one child entry in a rendered crush tree.
type CrushChild struct {
	ID     int     `json:"id"`
	Name   string  `json:"name"`
	Weight float32 `json:"weight"`
}

// CrushTree is a rendered subtree rooted at one bucket.
type CrushTree struct {
	Type     string        `json:"type"`
	Name     string        `json:"name"`
	ID       int           `json:"id"`
	Children []*CrushChild `json:"children"`
}

// CrushRule is a rendered rule as rows of text columns.
type CrushRule struct {
	Data [][]string `json:"data"`
}

// Crushmap is the rendered (display-oriented) form of a map.
type Crushmap struct {
	Trees []*CrushTree `json:"trees"`
	Rules []*CrushRule `json:"rules"`
}
// rulesSort orders the rule list by ascending rule id.
func (m *Map) rulesSort() {
	sort.Slice(m.Rules, func(a, b int) bool {
		return m.Rules[a].ID < m.Rules[b].ID
	})
}
// bucketsSort orders the bucket list by descending type id, so higher-level
// buckets (e.g. roots) come first.
func (m *Map) bucketsSort() {
	sort.Slice(m.Buckets, func(a, b int) bool {
		return m.Buckets[a].TypeID > m.Buckets[b].TypeID
	})
}
// GetTypeIDByName returns the numeric id of the named bucket type. It
// returns 0 when the name is unknown; note that 0 is also a valid type id,
// so callers cannot distinguish the two cases.
func (m *Map) GetTypeIDByName(name string) uint16 {
	for _, typ := range m.Types {
		if typ.Name != name {
			continue
		}
		return uint16(typ.ID)
	}
	return 0
}
// GetBucketByName returns the first bucket with the given name, or nil when
// no bucket matches.
func (m *Map) GetBucketByName(name string) *Bucket {
	for _, bucket := range m.Buckets {
		if bucket.Name != name {
			continue
		}
		return bucket
	}
	return nil
}
// GetBucketByID returns the first bucket with the given id, or nil when no
// bucket matches.
func (m *Map) GetBucketByID(id int32) *Bucket {
	for _, bucket := range m.Buckets {
		if bucket.ID != id {
			continue
		}
		return bucket
	}
	return nil
}
// NewMap returns an empty Map with the Tunables map pre-initialized so it
// can be written to immediately.
func NewMap() *Map {
	m := new(Map)
	m.Tunables = make(map[string]interface{})
	return m
}

17
crushmap/json.go Normal file
View File

@@ -0,0 +1,17 @@
package crushmap
import "encoding/json"
// DecodeJson fills the map from its JSON representation and normalizes the
// rule and bucket ordering afterwards.
func (cmap *Map) DecodeJson(data []byte) error {
	if err := json.Unmarshal(data, cmap); err != nil {
		return err
	}
	cmap.rulesSort()
	cmap.bucketsSort()
	return nil
}
// EncodeJson renders the map as JSON.
func (cmap *Map) EncodeJson() ([]byte, error) {
	data, err := json.Marshal(cmap)
	if err != nil {
		return nil, err
	}
	return data, nil
}

20
crushmap/json_test.go Normal file
View File

@@ -0,0 +1,20 @@
package crushmap
import (
"io/ioutil"
"testing"
)
// TestJson decodes the checked-in JSON crush map fixture and fails on any
// read or parse error (smoke test; the result is not inspected).
// NOTE(review): io/ioutil is deprecated since Go 1.16 — os.ReadFile is the
// drop-in replacement.
func TestJson(t *testing.T) {
	buf, err := ioutil.ReadFile("testdata/map.json")
	if err != nil {
		t.Fatal(err)
	}
	m := NewMap()
	err = m.DecodeJson(buf)
	if err != nil {
		t.Fatal(err)
	}
	_ = m
}

BIN
crushmap/testdata/map.bin vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

416
crushmap/testdata/map.json vendored Normal file
View File

@@ -0,0 +1,416 @@
{
"devices": [
{
"id": 0,
"name": "osd.0"
},
{
"id": 1,
"name": "osd.1"
},
{
"id": 2,
"name": "osd.2"
},
{
"id": 3,
"name": "osd.3"
},
{
"id": 4,
"name": "osd.4"
},
{
"id": 5,
"name": "osd.5"
}
],
"types": [
{
"type_id": 0,
"name": "osd"
},
{
"type_id": 1,
"name": "host"
},
{
"type_id": 2,
"name": "chassis"
},
{
"type_id": 3,
"name": "rack"
},
{
"type_id": 4,
"name": "row"
},
{
"type_id": 5,
"name": "pdu"
},
{
"type_id": 6,
"name": "pod"
},
{
"type_id": 7,
"name": "room"
},
{
"type_id": 8,
"name": "datacenter"
},
{
"type_id": 9,
"name": "region"
},
{
"type_id": 10,
"name": "root"
},
{
"type_id": 11,
"name": "disktype"
}
],
"buckets": [
{
"id": -1,
"name": "default",
"type_id": 10,
"type_name": "root",
"weight": 109509,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": -2,
"weight": 36503,
"pos": 0
},
{
"id": -3,
"weight": 36503,
"pos": 1
},
{
"id": -4,
"weight": 36503,
"pos": 2
}
]
},
{
"id": -2,
"name": "rmosd1",
"type_id": 1,
"type_name": "host",
"weight": 36569,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": -5,
"weight": 32768,
"pos": 0
},
{
"id": -6,
"weight": 3801,
"pos": 1
}
]
},
{
"id": -3,
"name": "rmosd2",
"type_id": 1,
"type_name": "host",
"weight": 36569,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": -7,
"weight": 32768,
"pos": 0
},
{
"id": -8,
"weight": 3801,
"pos": 1
}
]
},
{
"id": -4,
"name": "rmosd3",
"type_id": 1,
"type_name": "host",
"weight": 36569,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": -9,
"weight": 32768,
"pos": 0
},
{
"id": -10,
"weight": 3801,
"pos": 1
}
]
},
{
"id": -5,
"name": "rmosd1_ssd",
"type_id": 11,
"type_name": "disktype",
"weight": 3801,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": 3,
"weight": 3801,
"pos": 0
}
]
},
{
"id": -6,
"name": "rmosd1_spinning",
"type_id": 11,
"type_name": "disktype",
"weight": 32768,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": 0,
"weight": 32768,
"pos": 0
}
]
},
{
"id": -7,
"name": "rmosd2_ssd",
"type_id": 11,
"type_name": "disktype",
"weight": 3801,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": 4,
"weight": 3801,
"pos": 0
}
]
},
{
"id": -8,
"name": "rmosd2_spinning",
"type_id": 11,
"type_name": "disktype",
"weight": 32768,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": 1,
"weight": 32768,
"pos": 0
}
]
},
{
"id": -9,
"name": "rmosd3_ssd",
"type_id": 11,
"type_name": "disktype",
"weight": 3801,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": 5,
"weight": 3801,
"pos": 0
}
]
},
{
"id": -10,
"name": "rmosd3_spinning",
"type_id": 11,
"type_name": "disktype",
"weight": 32768,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": 2,
"weight": 32768,
"pos": 0
}
]
},
{
"id": -11,
"name": "spinning",
"type_id": 10,
"type_name": "root",
"weight": 98304,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": -6,
"weight": 32768,
"pos": 0
},
{
"id": -8,
"weight": 32768,
"pos": 1
},
{
"id": -10,
"weight": 32768,
"pos": 2
}
]
},
{
"id": -12,
"name": "ssd",
"type_id": 10,
"type_name": "root",
"weight": 11403,
"alg": "straw",
"hash": "rjenkins1",
"items": [
{
"id": -5,
"weight": 3801,
"pos": 0
},
{
"id": -7,
"weight": 3801,
"pos": 1
},
{
"id": -9,
"weight": 3801,
"pos": 2
}
]
}
],
"rules": [
{
"rule_id": 0,
"rule_name": "replicated_ruleset",
"ruleset": 0,
"type": 1,
"min_size": 1,
"max_size": 10,
"steps": [
{
"op": "take",
"item": -11,
"item_name": "spinning"
},
{
"op": "chooseleaf_firstn",
"num": 0,
"type": "disktype"
},
{
"op": "emit"
}
]
},
{
"rule_id": 1,
"rule_name": "spinning",
"ruleset": 1,
"type": 3,
"min_size": 3,
"max_size": 20,
"steps": [
{
"op": "set_chooseleaf_tries",
"num": 5
},
{
"op": "take",
"item": -11,
"item_name": "spinning"
},
{
"op": "chooseleaf_indep",
"num": 0,
"type": "osd"
},
{
"op": "emit"
}
]
},
{
"rule_id": 2,
"rule_name": "ssd",
"ruleset": 2,
"type": 1,
"min_size": 1,
"max_size": 10,
"steps": [
{
"op": "take",
"item": -12,
"item_name": "ssd"
},
{
"op": "chooseleaf_firstn",
"num": 0,
"type": "disktype"
},
{
"op": "emit"
}
]
}
],
"tunables": {
"choose_local_tries": 0,
"choose_local_fallback_tries": 0,
"choose_total_tries": 50,
"chooseleaf_descend_once": 1,
"chooseleaf_vary_r": 1,
"chooseleaf_stable": 0,
"straw_calc_version": 1,
"allowed_bucket_algs": 22,
"profile": "firefly",
"optimal_tunables": 0,
"legacy_tunables": 0,
"minimum_required_version": "firefly",
"require_feature_tunables": 1,
"require_feature_tunables2": 1,
"has_v2_rules": 1,
"require_feature_tunables3": 1,
"has_v3_rules": 0,
"has_v4_buckets": 0,
"require_feature_tunables5": 0,
"has_v5_rules": 0
},
"choose_args": {}
}

151
crushmap/testdata/map.txt vendored Normal file
View File

@@ -0,0 +1,151 @@
# begin crush map
tunable choose_local_tries 0
tunable choose_local_fallback_tries 0
tunable choose_total_tries 50
tunable chooseleaf_descend_once 1
tunable chooseleaf_vary_r 1
tunable straw_calc_version 1
# devices
device 0 osd.0
device 1 osd.1
device 2 osd.2
device 3 osd.3
device 4 osd.4
device 5 osd.5
# types
type 0 osd
type 1 host
type 2 chassis
type 3 rack
type 4 row
type 5 pdu
type 6 pod
type 7 room
type 8 datacenter
type 9 region
type 10 root
type 11 disktype
# buckets
disktype rmosd1_ssd {
id -5 # do not change unnecessarily
# weight 0.058
alg straw
hash 0 # rjenkins1
item osd.3 weight 0.058
}
disktype rmosd1_spinning {
id -6 # do not change unnecessarily
# weight 0.500
alg straw
hash 0 # rjenkins1
item osd.0 weight 0.500
}
host rmosd1 {
id -2 # do not change unnecessarily
# weight 0.557
alg straw
hash 0 # rjenkins1
item rmosd1_ssd weight 0.500
item rmosd1_spinning weight 0.058
}
disktype rmosd2_ssd {
id -7 # do not change unnecessarily
# weight 0.058
alg straw
hash 0 # rjenkins1
item osd.4 weight 0.058
}
disktype rmosd2_spinning {
id -8 # do not change unnecessarily
# weight 0.500
alg straw
hash 0 # rjenkins1
item osd.1 weight 0.500
}
host rmosd2 {
id -3 # do not change unnecessarily
# weight 0.557
alg straw
hash 0 # rjenkins1
item rmosd2_ssd weight 0.500
item rmosd2_spinning weight 0.058
}
disktype rmosd3_ssd {
id -9 # do not change unnecessarily
# weight 0.058
alg straw
hash 0 # rjenkins1
item osd.5 weight 0.058
}
disktype rmosd3_spinning {
id -10 # do not change unnecessarily
# weight 0.500
alg straw
hash 0 # rjenkins1
item osd.2 weight 0.500
}
host rmosd3 {
id -4 # do not change unnecessarily
# weight 0.557
alg straw
hash 0 # rjenkins1
item rmosd3_ssd weight 0.500
item rmosd3_spinning weight 0.058
}
root default {
id -1 # do not change unnecessarily
# weight 1.672
alg straw
hash 0 # rjenkins1
item rmosd1 weight 0.557
item rmosd2 weight 0.557
item rmosd3 weight 0.557
}
root spinning {
id -11 # do not change unnecessarily
# weight 1.500
alg straw
hash 0 # rjenkins1
item rmosd1_spinning weight 0.500
item rmosd2_spinning weight 0.500
item rmosd3_spinning weight 0.500
}
root ssd {
id -12 # do not change unnecessarily
# weight 0.174
alg straw
hash 0 # rjenkins1
item rmosd1_ssd weight 0.058
item rmosd2_ssd weight 0.058
item rmosd3_ssd weight 0.058
}
# rules
rule replicated_ruleset {
ruleset 0
type replicated
min_size 1
max_size 10
step take spinning
step chooseleaf firstn 0 type disktype
step emit
}
rule spinning {
ruleset 1
type erasure
min_size 3
max_size 20
step set_chooseleaf_tries 5
step take spinning
step chooseleaf indep 0 type osd
step emit
}
rule ssd {
ruleset 2
type replicated
min_size 1
max_size 10
step take ssd
step chooseleaf firstn 0 type disktype
step emit
}
# end crush map

151
crushmap/testdata/map.txt2 vendored Normal file
View File

@@ -0,0 +1,151 @@
# begin crush map
tunable choose_local_tries 0
tunable choose_local_fallback_tries 0
tunable choose_total_tries 50
tunable chooseleaf_descend_once 1
tunable chooseleaf_vary_r 1
tunable straw_calc_version 1
# devices
device 0 osd.0
device 1 osd.1
device 2 osd.2
device 3 osd.3
device 4 osd.4
device 5 osd.5
# types
type 0 osd
type 1 host
type 2 chassis
type 3 rack
type 4 row
type 5 pdu
type 6 pod
type 7 room
type 8 datacenter
type 9 region
type 10 root
type 11 disktype
# buckets
disktype rmosd1_ssd {
id -5 # do not change unnecessarily
# weight 0.058
alg straw2
hash 0 # rjenkins1
item osd.3 weight 0.058
}
disktype rmosd1_spinning {
id -6 # do not change unnecessarily
# weight 0.500
alg straw2
hash 0 # rjenkins1
item osd.0 weight 0.500
}
host rmosd1 {
id -2 # do not change unnecessarily
# weight 0.557
alg straw2
hash 0 # rjenkins1
item rmosd1_ssd weight 0.500
item rmosd1_spinning weight 0.058
}
disktype rmosd2_ssd {
id -7 # do not change unnecessarily
# weight 0.058
alg straw2
hash 0 # rjenkins1
item osd.4 weight 0.058
}
disktype rmosd2_spinning {
id -8 # do not change unnecessarily
# weight 0.500
alg straw2
hash 0 # rjenkins1
item osd.1 weight 0.500
}
host rmosd2 {
id -3 # do not change unnecessarily
# weight 0.557
alg straw2
hash 0 # rjenkins1
item rmosd2_ssd weight 0.500
item rmosd2_spinning weight 0.058
}
disktype rmosd3_ssd {
id -9 # do not change unnecessarily
# weight 0.058
alg straw2
hash 0 # rjenkins1
item osd.5 weight 0.058
}
disktype rmosd3_spinning {
id -10 # do not change unnecessarily
# weight 0.500
alg straw2
hash 0 # rjenkins1
item osd.2 weight 0.500
}
host rmosd3 {
id -4 # do not change unnecessarily
# weight 0.557
alg straw2
hash 0 # rjenkins1
item rmosd3_ssd weight 0.500
item rmosd3_spinning weight 0.058
}
root default {
id -1 # do not change unnecessarily
# weight 1.672
alg straw2
hash 0 # rjenkins1
item rmosd1 weight 0.557
item rmosd2 weight 0.557
item rmosd3 weight 0.557
}
root spinning {
id -11 # do not change unnecessarily
# weight 1.500
alg straw2
hash 0 # rjenkins1
item rmosd1_spinning weight 0.500
item rmosd2_spinning weight 0.500
item rmosd3_spinning weight 0.500
}
root ssd {
id -12 # do not change unnecessarily
# weight 0.174
alg straw2
hash 0 # rjenkins1
item rmosd1_ssd weight 0.058
item rmosd2_ssd weight 0.058
item rmosd3_ssd weight 0.058
}
# rules
rule replicated_ruleset {
ruleset 0
type replicated
min_size 1
max_size 10
step take spinning
step chooseleaf firstn 0 type disktype
step emit
}
rule spinning {
ruleset 1
type erasure
min_size 3
max_size 20
step set_chooseleaf_tries 5
step take spinning
step chooseleaf indep 0 type osd
step emit
}
rule ssd {
ruleset 2
type replicated
min_size 1
max_size 10
step take ssd
step chooseleaf firstn 0 type disktype
step emit
}
# end crush map

147
crushmap/text.go Normal file
View File

@@ -0,0 +1,147 @@
package crushmap
import (
"fmt"
"sync"
)
type textParser struct {
l *lex
}
// identState scans a bare top-level keyword (everything up to the next
// space) and dispatches to the matching section state. "device",
// "type", "rule" and "tunable" have dedicated states; any other word is
// assumed to open a bucket declaration (e.g. "host", "root", or a
// custom bucket type) and is re-emitted by bucketState.
func identState(l *lex) stateFn {
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ':
			break loop
		default:
			// NOTE(review): EOFRune and '\n' also fall through here, and
			// lexNext cannot advance past end-of-input, so a keyword not
			// followed by a space would spin forever — confirm inputs
			// always separate top-level keywords with a space.
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "device":
		l.lexIgnore()
		l.lexPush(topState)
		return deviceState
	case "type":
		l.lexIgnore()
		l.lexPush(topState)
		return typeState
	case "rule":
		l.lexIgnore()
		l.lexPush(topState)
		return ruleState
	case "tunable":
		l.lexIgnore()
		l.lexPush(topState)
		return tunableState
	}
	// Unknown keyword: treat it as a bucket type; the lexeme is kept
	// (not ignored) so bucketState can emit it with itemBucketBeg.
	l.lexPush(topState)
	return bucketState
}
// topState is the root lexer state: it skips spaces and newlines
// between top-level sections, emits itemEOF at end of input, routes
// '#' lines to the comment state, and hands anything else to
// identState to classify the next keyword.
func topState(l *lex) stateFn {
	for {
		r := l.lexPeek()
		switch r {
		case ' ':
			l.lexNext()
			l.lexIgnore()
		case '\n':
			l.lexNext()
			l.lexIgnore()
		case EOFRune:
			l.lexEmit(itemEOF)
			return nil
		case '#':
			// Drop the '#' itself; commentLineState emits the comment
			// text and then pops back to topState.
			l.lexNext()
			l.lexIgnore()
			l.lexPush(topState)
			return commentLineState
		default:
			return identState
		}
	}
	// Every path returns from inside the loop; the original trailing
	// "return nil" was unreachable and has been removed.
}
// DecodeText parses a text-format CRUSH map (the output of
// "crushtool -d") and fills cmap with its tunables, devices, types,
// buckets and rules. After parsing, bucket type names are resolved to
// numeric type IDs via the names collected from the other sections.
// It returns the first lexing/parsing error encountered.
func (cmap *Map) DecodeText(data []byte) error {
	// NOTE(review): the lexer runs synchronously (lexStartSync), so this
	// mutex guards nothing concurrent; it is kept only so the file's
	// "sync" import stays in use — confirm and remove together with the
	// import if no concurrency is planned.
	var mu sync.Mutex
	// mapItems maps a parsed name (device, type or bucket) to its ID so
	// Bucket.TypeName references can be resolved after the main loop.
	mapItems := make(map[string]int32)
	p := &textParser{l: lexNew(string(data), topState)}
	p.l.lexStartSync()
loop:
	for {
		tok, done := p.l.lexNextToken()
		if done {
			break loop
		}
		switch tok.itype {
		case itemEOF:
			break loop
		case itemComment:
			continue
		case itemTunableBeg:
			itunekey, ituneval, err := p.handleTunable()
			if err != nil {
				return err
			}
			cmap.Tunables[itunekey] = ituneval
		case itemDeviceBeg:
			idevice, err := p.handleDevice()
			if err != nil {
				return err
			}
			mu.Lock()
			mapItems[idevice.Name] = idevice.ID
			mu.Unlock()
			cmap.Devices = append(cmap.Devices, idevice)
		case itemTypeBeg:
			itype, err := p.handleType()
			if err != nil {
				return err
			}
			mu.Lock()
			mapItems[itype.Name] = itype.ID
			mu.Unlock()
			cmap.Types = append(cmap.Types, itype)
		case itemRuleBeg:
			irule, err := p.handleRule()
			if err != nil {
				return err
			}
			cmap.Rules = append(cmap.Rules, irule)
		case itemBucketBeg:
			ibucket, err := p.handleBucket(tok.ivalue)
			if err != nil {
				return err
			}
			mu.Lock()
			mapItems[ibucket.Name] = ibucket.ID
			mu.Unlock()
			cmap.Buckets = append(cmap.Buckets, ibucket)
		default:
			// Go convention: lowercase, no trailing newline
			// (was "error: %s\n").
			return fmt.Errorf("unexpected token %q", tok.ivalue)
		}
	}
	// Resolve bucket type names ("host", "root", ...) to numeric IDs.
	for idx := range cmap.Buckets {
		id, ok := mapItems[cmap.Buckets[idx].TypeName]
		if !ok {
			return fmt.Errorf("invalid bucket type: %s", cmap.Buckets[idx].TypeName)
		}
		cmap.Buckets[idx].TypeID = CrushBucketType(id)
	}
	cmap.rulesSort()
	cmap.bucketsSort()
	return nil
}

337
crushmap/text_bucket.go Normal file
View File

@@ -0,0 +1,337 @@
package crushmap
import (
"errors"
"fmt"
"strconv"
)
// bucketState starts a bucket declaration; the current lexeme is the
// bucket's type keyword (e.g. "host") and is emitted as itemBucketBeg.
func bucketState(l *lex) stateFn {
	l.lexEmit(itemBucketBeg)
	return bucketStartState
}

// bucketStartState skips whitespace after the bucket type and decides
// whether the name is omitted (immediate '{') or follows next.
func bucketStartState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	r := l.lexPeek()
	switch r {
	case '{':
		l.lexNext()
		l.lexIgnore()
		return bucketIdentState
	case '#':
		l.lexErr(fmt.Sprintf("unexpected token %q", r))
		return l.lexPop()
	}
	return bucketNameState
}

// bucketNameState lexes the bucket name, terminated by '{' or a space.
// A newline or comment before the name is malformed.
func bucketNameState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case '{':
			l.lexNext()
			l.lexIgnore()
			break loop
		case ' ':
			break loop
		case '\n', '#':
			l.lexErr(fmt.Sprintf("unexpected token %q", r))
			return l.lexPop()
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemBucketName)
	return bucketIdentState
}

// bucketIDState lexes the numeric id value and then looks ahead for an
// optional "class" keyword on the same line.
func bucketIDState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop1:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop1
		}
		l.lexNext()
	}
	l.lexEmit(itemBucketID)
	l.lexTake(" \t")
	l.lexIgnore()
loop2:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop2
		default:
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "class":
		l.lexIgnore()
		return bucketIDClassState
	}
	// Anything else (e.g. a trailing comment) is handled by the
	// generic body state.
	return bucketIdentState
}

// bucketIDClassState lexes the device-class name following "id N class".
func bucketIDClassState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemBucketIDClass)
	return bucketIdentState
}

// bucketHashState lexes the numeric hash value of a "hash N" line.
func bucketHashState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	l.lexTake("0123456789")
	l.lexEmit(itemBucketHash)
	return bucketIdentState
}

// bucketAlgState lexes the algorithm name of an "alg xxx" line
// (e.g. "straw", "straw2").
func bucketAlgState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	for {
		r := l.lexPeek()
		if r == '\n' || r == ' ' || r == '#' {
			break
		}
		l.lexNext()
	}
	l.lexEmit(itemBucketAlg)
	return bucketIdentState
}

// bucketItemState lexes the item name of an "item <name> ..." line;
// the name must be followed by a space.
func bucketItemState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ':
			break loop
		case '\n', '#':
			l.lexErr(fmt.Sprintf("unexpected token %q", r))
			return l.lexPop()
		}
		l.lexNext()
	}
	l.lexEmit(itemBucketItemName)
	return bucketItemIdentState
}

// bucketItemIdentState dispatches on the optional "weight"/"pos"
// attributes of an item line; any other word (or end of line) ends the
// item with itemBucketItemEnd.
func bucketItemIdentState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n':
			break loop
		case '#':
			break loop
		default:
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "weight":
		l.lexIgnore()
		return bucketItemWeightState
	case "pos":
		l.lexIgnore()
		return bucketItemPosState
	}
	l.lexEmit(itemBucketItemEnd)
	return bucketIdentState
}

// bucketItemWeightState lexes a decimal weight value ("0.500").
func bucketItemWeightState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	l.lexTake(".0123456789")
	l.lexEmit(itemBucketItemWeight)
	return bucketItemIdentState
}

// bucketItemPosState lexes an integer position value.
func bucketItemPosState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	l.lexTake("0123456789")
	l.lexEmit(itemBucketItemPos)
	return bucketItemIdentState
}

// bucketIdentState scans the bucket body keyword by keyword:
// "id"/"alg"/"hash"/"item" route to their field states, '#' starts a
// comment, '}' ends the bucket, and a newline restarts the scan.
func bucketIdentState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ':
			break loop
		case '#':
			l.lexNext()
			l.lexIgnore()
			l.lexPush(bucketIdentState)
			return commentLineState
		case '}':
			l.lexNext()
			l.lexIgnore()
			l.lexEmit(itemBucketEnd)
			return l.lexPop()
		case '\n':
			l.lexNext()
			l.lexIgnore()
			return bucketIdentState
		default:
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "id":
		l.lexIgnore()
		return bucketIDState
	case "alg":
		l.lexIgnore()
		return bucketAlgState
	case "hash":
		l.lexIgnore()
		return bucketHashState
	case "item":
		l.lexIgnore()
		l.lexEmit(itemBucketItemBeg)
		return bucketItemState
	}
	// Unknown keyword inside a bucket body: give up on this bucket.
	return l.lexPop()
}
// handleBucket consumes lexer tokens for one bucket declaration and
// builds a Bucket. itype is the bucket's type keyword (e.g. "host",
// "root") captured by the lexer before itemBucketBeg was emitted.
func (p *textParser) handleBucket(itype string) (*Bucket, error) {
	bucket := &Bucket{TypeName: itype}
	for {
		tok, fin := p.l.lexNextToken()
		if fin {
			return bucket, nil
		}
		switch tok.itype {
		case itemEOF, itemBucketEnd:
			return bucket, nil
		case itemComment:
			// Comments inside a bucket body carry no data.
		case itemBucketName:
			bucket.Name = tok.ivalue
		case itemBucketIDClass:
			bucket.IDClass = tok.ivalue
		case itemBucketID:
			id, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			bucket.ID = int32(id)
		case itemBucketAlg:
			bucket.Alg = tok.ivalue
		case itemBucketHash:
			// Only hash 0 ("rjenkins1") is accepted in text maps.
			if tok.ivalue != "0" {
				return nil, errors.New("invalid bucket hash")
			}
			bucket.Hash = "rjenkins1"
		case itemBucketItemBeg:
			item, err := p.handleBucketItem()
			if err != nil {
				return nil, err
			}
			bucket.Items = append(bucket.Items, item)
		}
	}
}
// handleBucketItem consumes the tokens of a single
// "item <name> [weight W] [pos P]" entry and returns the Item.
func (p *textParser) handleBucketItem() (*Item, error) {
	it := &Item{}
	for {
		tok, fin := p.l.lexNextToken()
		if fin {
			return it, nil
		}
		switch tok.itype {
		case itemEOF, itemBucketItemEnd:
			return it, nil
		case itemComment:
			// nothing to record
		case itemBucketItemName:
			it.Name = tok.ivalue
		case itemBucketItemWeight:
			w, err := strconv.ParseFloat(tok.ivalue, 32)
			if err != nil {
				return nil, err
			}
			it.Weight = float32(w)
		case itemBucketItemPos:
			pos, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			it.Pos = pos
		}
	}
}

14
crushmap/text_comment.go Normal file
View File

@@ -0,0 +1,14 @@
package crushmap
// commentLineState consumes the remainder of a '#' comment line (the
// '#' itself was already discarded by the caller), emits the trimmed
// text as itemComment, and pops back to the pushed state.
//
// Fix: the original looped on lexNext until '\n' only; at end of input
// lexNext keeps returning EOFRune without advancing, so a comment on
// the final, newline-less line spun forever. EOFRune now ends the loop.
func commentLineState(l *lex) stateFn {
loop:
	for {
		switch l.lexPeek() {
		case '\n':
			l.lexNext()
			break loop
		case EOFRune:
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmitTrim(itemComment)
	return l.lexPop()
}

135
crushmap/text_device.go Normal file
View File

@@ -0,0 +1,135 @@
package crushmap
import (
"errors"
"fmt"
"strconv"
)
// deviceState starts a "device <id> <name> [class <class>]" line.
func deviceState(l *lex) stateFn {
	l.lexIgnore()
	l.lexEmit(itemDeviceBeg)
	return deviceIDState
}

// deviceIDState lexes the numeric device id; a newline or comment
// before the id is malformed.
func deviceIDState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case '\n', '#':
			l.lexErr(fmt.Sprintf("unexpected token %q", r))
			return l.lexPop()
		case ' ':
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemDeviceID)
	return deviceNameState
}

// deviceNameState lexes the device name (e.g. "osd.0").
func deviceNameState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemDeviceName)
	return deviceIdentState
}

// deviceIdentState looks for an optional trailing "class" keyword;
// end of line ends the device with itemDeviceEnd.
func deviceIdentState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case '\n', '#':
			l.lexEmit(itemDeviceEnd)
			return l.lexPop()
		case ' ':
			break loop
		default:
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "class":
		l.lexIgnore()
		return deviceClassState
	}
	// NOTE(review): an unknown trailing word pops without emitting
	// itemDeviceEnd — confirm handleDevice tolerates this.
	return l.lexPop()
}

// deviceClassState lexes the device-class name following "class".
func deviceClassState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case '\n', '#', ' ':
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemDeviceClass)
	return deviceIdentState
}
// handleDevice consumes the tokens of a single device declaration
// ("device <id> <name> [class <class>]") and returns the Device.
// The ID starts at -1 until an itemDeviceID token is seen; a device
// without a name is rejected.
func (p *textParser) handleDevice() (*Device, error) {
	dev := &Device{ID: -1}
parse:
	for {
		tok, fin := p.l.lexNextToken()
		if fin {
			break parse
		}
		switch tok.itype {
		case itemEOF, itemDeviceEnd:
			break parse
		case itemComment:
			// ignore trailing comments
		case itemDeviceID:
			n, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			dev.ID = int32(n)
		case itemDeviceName:
			dev.Name = tok.ivalue
		case itemDeviceClass:
			dev.Class = tok.ivalue
		}
	}
	if dev.Name == "" {
		return nil, errors.New("invalid device")
	}
	return dev, nil
}

229
crushmap/text_lexer.go Normal file
View File

@@ -0,0 +1,229 @@
package crushmap
import (
"errors"
"strings"
"unicode/utf8"
)
// stateFn is one lexer state: it consumes input from the lexer and
// returns the next state, or nil to terminate the lexer.
type stateFn func(*lex) stateFn

// tokType identifies the kind of a lexed token.
type tokType int

const (
	// EOFRune is the sentinel rune returned by lexNext/lexPeek at
	// end of input.
	EOFRune rune = -1
)

// Token types emitted by the lexer. The Beg/End pairs bracket the
// tokens belonging to one section (tunable, device, type, bucket,
// rule, rule step, bucket item).
const (
	itemError tokType = iota
	itemEOF
	itemComment
	itemTunableBeg
	itemTunableKey
	itemTunableVal
	itemTunableEnd
	itemDeviceBeg
	itemDeviceID
	itemDeviceName
	itemDeviceClass
	itemDeviceEnd
	itemTypeBeg
	itemTypeID
	itemTypeName
	itemTypeEnd
	itemBucketBeg
	itemBucketName
	itemBucketID
	itemBucketIDClass
	itemBucketAlg
	itemBucketHash
	itemBucketItemBeg
	itemBucketItemName
	itemBucketItemWeight
	itemBucketItemPos
	itemBucketItemEnd
	itemBucketEnd
	itemRuleBeg
	itemRuleName
	itemRuleID
	itemRuleRuleset
	itemRuleType
	itemRuleMinSize
	itemRuleMaxSize
	itemRuleStepBeg
	itemRuleStepSetChooseleafTries
	itemRuleStepSetChooseTries
	itemRuleStepTake
	itemRuleStepTakeType
	itemRuleStepChoose
	itemRuleStepTakeClass
	itemRuleStepChooseFirstN
	itemRuleStepChooseIndep
	itemRuleStepChooseType
	itemRuleStepEmit
	itemRuleStepEnd
	itemRuleEnd
)

// item is a single token produced by the lexer.
type item struct {
	itype  tokType // token kind
	ivalue string  // raw lexeme text
	// NOTE(review): iline is never assigned by lexEmit/lexEmitTrim —
	// confirm whether line tracking is still wanted.
	iline int
}

// lex is a channel-based lexer in the style of text/template: state
// functions consume runes from source and emit items on the channel.
type lex struct {
	source     string     // full input text
	start      int        // start offset of the current lexeme
	position   int        // offset just past the last consumed rune
	line       int        // current line (see iline note above)
	startState stateFn    // initial state for lexRun
	err        error      // last error reported via lexErr
	items      chan item  // emitted tokens, closed when lexRun ends
	errHandler func(string) // optional error callback; lexErr panics without it
	rewind     runeStack  // runes available for lexRewind
	stack      []stateFn  // pushed return states for lexPop
}
// lexNew builds a lexer over src starting in state start. The item
// channel is buffered to roughly half the input length so the whole
// input can be lexed before any token is drained (see lexStartSync).
func lexNew(src string, start stateFn) *lex {
	buffSize := len(src) / 2
	if buffSize <= 0 {
		buffSize = 1
	}
	return &lex{
		source:     src,
		startState: start,
		line:       1,
		rewind:     newRuneStack(),
		items:      make(chan item, buffSize),
		stack:      make([]stateFn, 0, 10),
	}
}

// lexStart runs the lexer concurrently with the consumer.
func (l *lex) lexStart() {
	go l.lexRun()
}

// lexStartSync runs the lexer to completion before returning; relies
// on the item channel buffer being large enough to hold all tokens.
func (l *lex) lexStartSync() {
	l.lexRun()
}

// lexIsDigit reports whether r is an ASCII digit.
func lexIsDigit(r rune) bool {
	return r >= '0' && r <= '9'
}

// lexCurrent returns the lexeme accumulated since the last emit/ignore.
func (l *lex) lexCurrent() string {
	return l.source[l.start:l.position]
}

// lexEmit sends the current lexeme as a token of type t and resets the
// lexeme start and the rewind stack.
func (l *lex) lexEmit(t tokType) {
	itm := item{
		itype:  t,
		ivalue: l.lexCurrent(),
	}
	l.items <- itm
	l.start = l.position
	l.rewind.clear()
}

// lexEmitTrim is lexEmit with surrounding whitespace stripped from the
// token value (used for comment text).
func (l *lex) lexEmitTrim(t tokType) {
	itm := item{
		itype:  t,
		ivalue: strings.TrimSpace(l.lexCurrent()),
	}
	l.items <- itm
	l.start = l.position
	l.rewind.clear()
}

// lexIgnore discards the current lexeme.
func (l *lex) lexIgnore() {
	l.rewind.clear()
	l.start = l.position
}

// lexPeek returns the next rune without consuming it.
func (l *lex) lexPeek() rune {
	r := l.lexNext()
	l.lexRewind()
	return r
}

// lexRewind un-consumes the most recently read rune. EOFRune occupies
// zero bytes, so rewinding past it is a no-op; position never moves
// before the current lexeme start.
func (l *lex) lexRewind() {
	r := l.rewind.pop()
	if r > EOFRune {
		size := utf8.RuneLen(r)
		l.position -= size
		if l.position < l.start {
			l.position = l.start
		}
	}
}

// lexNext consumes and returns the next rune, or EOFRune (without
// advancing) at end of input. Each rune is recorded for lexRewind.
func (l *lex) lexNext() rune {
	var (
		r rune
		s int
	)
	str := l.source[l.position:]
	if len(str) == 0 {
		r, s = EOFRune, 0
	} else {
		r, s = utf8.DecodeRuneInString(str)
	}
	l.position += s
	l.rewind.push(r)
	return r
}

// lexPush saves state so a nested state (e.g. a comment) can return to
// it via lexPop.
func (l *lex) lexPush(state stateFn) {
	l.stack = append(l.stack, state)
}

// lexPop returns the most recently pushed state. Popping an empty
// stack is a lexer bug; lexErr panics by default, so the broken read
// below is unreachable in that case.
func (l *lex) lexPop() stateFn {
	if len(l.stack) == 0 {
		l.lexErr("BUG in lexer: no states to pop")
	}
	last := l.stack[len(l.stack)-1]
	l.stack = l.stack[0 : len(l.stack)-1]
	return last
}

// lexTake consumes a run of runes drawn from chars.
func (l *lex) lexTake(chars string) {
	r := l.lexNext()
	for strings.ContainsRune(chars, r) {
		r = l.lexNext()
	}
	l.lexRewind() // last next wasn't a match
}

// lexNextToken returns the next token; the second result is true once
// the channel is closed (lexing finished).
func (l *lex) lexNextToken() (*item, bool) {
	if itm, ok := <-l.items; ok {
		return &itm, false
	} else {
		return nil, true
	}
}

// lexErr reports a lexing error: through errHandler when one is set,
// otherwise by panicking.
func (l *lex) lexErr(e string) {
	if l.errHandler != nil {
		l.err = errors.New(e)
		l.errHandler(e)
	} else {
		panic(e)
	}
}

// lexRun drives the state machine until a state returns nil, then
// closes the item channel to signal completion.
func (l *lex) lexRun() {
	state := l.startState
	for state != nil {
		state = state(l)
	}
	close(l.items)
}

462
crushmap/text_rule.go Normal file
View File

@@ -0,0 +1,462 @@
package crushmap
import (
"errors"
"fmt"
"strconv"
)
// ruleState starts a rule declaration; the name may be omitted when
// '{' follows immediately.
func ruleState(l *lex) stateFn {
	l.lexEmit(itemRuleBeg)
	l.lexTake(" \t")
	l.lexIgnore()
	r := l.lexPeek()
	switch r {
	case '{':
		l.lexNext()
		l.lexIgnore()
		return ruleIdentState
	case '#':
		l.lexErr(fmt.Sprintf("unexpected token %q", r))
		return l.lexPop()
	}
	return ruleNameState
}

// ruleIdentState scans the rule body keyword by keyword and routes
// id/ruleset/min_size/max_size/type/step to their field states; '}'
// ends the rule, '#' starts a comment.
func ruleIdentState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ':
			break loop
		case '#':
			l.lexNext()
			l.lexIgnore()
			l.lexPush(ruleIdentState)
			return commentLineState
		case '}':
			l.lexNext()
			l.lexIgnore()
			l.lexEmit(itemRuleEnd)
			return l.lexPop()
		case '\n':
			l.lexNext()
			l.lexIgnore()
			return ruleIdentState
		default:
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "id":
		l.lexIgnore()
		return ruleRuleIDState
	case "ruleset":
		l.lexIgnore()
		return ruleRulesetState
	case "min_size":
		l.lexIgnore()
		return ruleMinSizeState
	case "max_size":
		l.lexIgnore()
		return ruleMaxSizeState
	case "type":
		l.lexIgnore()
		return ruleTypeState
	case "step":
		l.lexIgnore()
		l.lexEmit(itemRuleStepBeg)
		return ruleStepIdentState
	}
	return l.lexPop()
}

// ruleStepIdentState classifies the first word of a "step ..." line.
// For choose/chooseleaf the lexeme itself is emitted (it becomes the
// step Op); for take/emit a marker token is emitted instead.
func ruleStepIdentState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop
		default:
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "set_chooseleaf_tries":
		l.lexIgnore()
		return ruleStepSetChooseleafTries
	case "set_choose_tries":
		l.lexIgnore()
		return ruleStepSetChooseTries
	case "take":
		l.lexIgnore()
		l.lexEmit(itemRuleStepTake)
		return ruleStepTake
	case "chooseleaf", "choose":
		l.lexEmit(itemRuleStepChoose)
		return ruleStepChoose
	case "emit":
		l.lexEmit(itemRuleStepEmit)
		return ruleStepEmit
	}
	return ruleIdentState
}

// ruleStepSetChooseleafTries lexes the value of "set_chooseleaf_tries N".
// NOTE(review): leading whitespace is not skipped first, so lexTake
// matches nothing and the emitted value is empty; no itemRuleStepEnd is
// emitted either, which lets the next step's tokens be consumed by the
// same handleRuleStep call — confirm intended behavior.
func ruleStepSetChooseleafTries(l *lex) stateFn {
	l.lexTake("0123456789")
	l.lexEmit(itemRuleStepSetChooseleafTries)
	return ruleIdentState
}

// ruleStepSetChooseTries lexes the value of "set_choose_tries N".
// NOTE(review): same whitespace/step-end caveat as
// ruleStepSetChooseleafTries above.
func ruleStepSetChooseTries(l *lex) stateFn {
	l.lexTake("0123456789")
	l.lexEmit(itemRuleStepSetChooseTries)
	return ruleIdentState
}

// ruleStepChoose scans the arguments of a choose/chooseleaf step
// ("firstn N" / "indep N" / "type T"); any other word ends the step.
func ruleStepChoose(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop
		default:
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "firstn":
		l.lexIgnore()
		return ruleStepChooseFirstN
	case "indep":
		l.lexIgnore()
		return ruleStepChooseIndep
	case "type":
		l.lexIgnore()
		return ruleStepChooseType
	}
	l.lexEmit(itemRuleStepEnd)
	return ruleIdentState
}

// ruleStepChooseFirstN lexes the numeric argument of "firstn N".
func ruleStepChooseFirstN(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	l.lexTake("0123456789")
	l.lexEmit(itemRuleStepChooseFirstN)
	return ruleStepChoose
}

// ruleStepChooseIndep lexes the numeric argument of "indep N".
func ruleStepChooseIndep(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	l.lexTake("0123456789")
	l.lexEmit(itemRuleStepChooseIndep)
	return ruleStepChoose
}

// ruleStepChooseType lexes the bucket-type argument of "type T".
func ruleStepChooseType(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemRuleStepChooseType)
	return ruleStepChoose
}

// ruleStepEmit closes an "emit" step.
func ruleStepEmit(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	l.lexEmit(itemRuleStepEnd)
	return ruleIdentState
}

// ruleStepTake lexes the bucket name of "take <name> [class <class>]".
func ruleStepTake(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop1:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop1
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemRuleStepTakeType)
	l.lexTake(" \t")
	l.lexIgnore()
loop2:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop2
		default:
			l.lexNext()
		}
	}
	switch l.lexCurrent() {
	case "class":
		l.lexIgnore()
		return ruleStepTakeClass
	}
	l.lexEmit(itemRuleStepEnd)
	return ruleIdentState
}

// ruleStepTakeClass lexes the class name of "take <name> class <class>"
// and closes the step.
func ruleStepTakeClass(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case ' ', '\n', '#', '\t':
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemRuleStepTakeClass)
	l.lexEmit(itemRuleStepEnd)
	return ruleIdentState
}
// ruleNameState lexes the rule name, terminated by '{' or a space;
// a newline or comment before the name is malformed.
func ruleNameState(l *lex) stateFn {
loop:
	for {
		r := l.lexPeek()
		switch r {
		case '{':
			l.lexNext()
			l.lexIgnore()
			break loop
		case ' ':
			break loop
		case '\n', '#':
			l.lexErr(fmt.Sprintf("unexpected token %q", r))
			return l.lexPop()
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemRuleName)
	return ruleIdentState
}

// ruleRulesetState lexes the numeric value of a "ruleset N" line.
func ruleRulesetState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	for {
		r := l.lexPeek()
		if r == '\n' || r == ' ' || r == '#' {
			break
		}
		l.lexNext()
	}
	l.lexEmit(itemRuleRuleset)
	return ruleIdentState
}

// ruleRuleIDState lexes the numeric value of an "id N" line.
func ruleRuleIDState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	for {
		r := l.lexPeek()
		if r == '\n' || r == ' ' || r == '#' {
			break
		}
		l.lexNext()
	}
	l.lexEmit(itemRuleID)
	return ruleIdentState
}

// ruleMinSizeState lexes the numeric value of a "min_size N" line.
func ruleMinSizeState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	l.lexTake("0123456789")
	l.lexEmit(itemRuleMinSize)
	return ruleIdentState
}

// ruleMaxSizeState lexes the numeric value of a "max_size N" line.
func ruleMaxSizeState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	l.lexTake("0123456789")
	l.lexEmit(itemRuleMaxSize)
	return ruleIdentState
}

// ruleTypeState lexes the value of a "type" line; may be a word
// ("replicated"/"erasure") or a number — handleRule accepts both.
func ruleTypeState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	for {
		r := l.lexPeek()
		if r == '\n' || r == ' ' || r == '#' {
			break
		}
		l.lexNext()
	}
	l.lexEmit(itemRuleType)
	return ruleIdentState
}
// handleRule consumes the tokens of one rule declaration and builds a
// Rule. The textual type names "replicated" and "erasure" map to their
// numeric PG types; a bare number is used as-is. ID and Ruleset are
// synonyms in the text format and are kept in sync on return.
func (p *textParser) handleRule() (*Rule, error) {
	irule := &Rule{}
	// idSeen records whether an explicit "id" line was parsed, so that
	// a zero-valued ID cannot clobber a "ruleset N" value. Previously a
	// rule declaring only "ruleset N" (the older text format) had its
	// Ruleset unconditionally overwritten with the zero ID.
	idSeen := false
Loop:
	for {
		tok, done := p.l.lexNextToken()
		if done {
			break Loop
		}
		switch tok.itype {
		case itemEOF, itemRuleEnd:
			break Loop
		case itemComment:
			continue
		case itemRuleRuleset:
			id, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			irule.Ruleset = uint8(id)
		case itemRuleMinSize:
			id, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			irule.MinSize = uint8(id)
		case itemRuleMaxSize:
			id, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			irule.MaxSize = uint8(id)
		case itemRuleStepBeg:
			istep, err := p.handleRuleStep()
			if err != nil {
				return nil, err
			}
			// Steps are numbered in declaration order.
			istep.Num = int32(len(irule.Steps))
			irule.Steps = append(irule.Steps, istep)
		case itemRuleName:
			irule.Name = tok.ivalue
		case itemRuleType:
			id, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				// Not a number: accept the symbolic names.
				switch tok.ivalue {
				case "replicated":
					irule.Type = ReplicatedPG
				case "erasure":
					irule.Type = ErasurePG
				default:
					return nil, errors.New("unknown rule type")
				}
			} else {
				irule.Type = uint8(id)
			}
		case itemRuleID:
			id, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			irule.ID = uint8(id)
			idSeen = true
		}
	}
	// Keep the synonymous fields consistent: an explicit id wins,
	// otherwise mirror the ruleset value into ID.
	if idSeen {
		irule.Ruleset = irule.ID
	} else {
		irule.ID = irule.Ruleset
	}
	return irule, nil
}
// handleRuleStep consumes the tokens of one "step ..." line and
// returns the populated Step.
func (p *textParser) handleRuleStep() (*Step, error) {
	step := &Step{}
parse:
	for {
		tok, fin := p.l.lexNextToken()
		if fin {
			break parse
		}
		switch tok.itype {
		case itemEOF, itemRuleStepEnd:
			break parse
		case itemComment:
			// nothing to record
		case itemRuleStepTake:
			step.Op = "take"
			step.Item = -1
		case itemRuleStepTakeType:
			step.ItemName = tok.ivalue
		case itemRuleStepTakeClass:
			step.ItemClass = tok.ivalue
		case itemRuleStepChoose:
			// "choose" or "chooseleaf"; a later firstn/indep token
			// appends the mode suffix below.
			step.Op = tok.ivalue
		case itemRuleStepChooseIndep:
			step.Op = step.Op + "_" + "indep"
			n, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			step.Num = int32(n)
		case itemRuleStepChooseFirstN:
			step.Op = step.Op + "_" + "firstn"
			n, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			step.Num = int32(n)
		case itemRuleStepChooseType:
			step.ItemType = tok.ivalue
		case itemRuleStepEmit:
			step.Op = "emit"
		}
	}
	return step, nil
}

16
crushmap/text_space.go Normal file
View File

@@ -0,0 +1,16 @@
package crushmap
import "fmt"
// spaceState consumes one mandatory whitespace rune plus any trailing
// spaces/tabs, then pops back to the pushed state. A non-whitespace
// rune is a lexing error.
func spaceState(l *lex) stateFn {
	switch r := l.lexNext(); r {
	case ' ', '\t', '\n', '\r':
		// expected separator
	default:
		l.lexErr(fmt.Sprintf("unexpected token %q", r))
		return nil
	}
	l.lexTake(" \t")
	l.lexIgnore()
	return l.lexPop()
}

38
crushmap/text_stack.go Normal file
View File

@@ -0,0 +1,38 @@
package crushmap
// runeNode is one linked-list cell of a runeStack.
type runeNode struct {
	r    rune
	next *runeNode
}

// runeStack is a LIFO of runes used by the lexer to support rewinding
// consumed input. The zero value is an empty, ready-to-use stack.
type runeStack struct {
	start *runeNode
}

// newRuneStack returns an empty stack.
func newRuneStack() runeStack {
	return runeStack{}
}

// push places r on top of the stack.
func (s *runeStack) push(r rune) {
	s.start = &runeNode{r: r, next: s.start}
}

// pop removes and returns the top rune, or EOFRune when the stack is
// empty.
func (s *runeStack) pop() rune {
	n := s.start
	if n == nil {
		return EOFRune
	}
	s.start = n.next
	return n.r
}

// clear drops all stacked runes.
func (s *runeStack) clear() {
	s.start = nil
}

20
crushmap/text_test.go Normal file
View File

@@ -0,0 +1,20 @@
package crushmap
import (
"io/ioutil"
"testing"
)
// TestText decodes the sample text-format CRUSH map from testdata and
// fails the test on any read or decode error.
func TestText(t *testing.T) {
	buf, err := ioutil.ReadFile("testdata/map.txt")
	if err != nil {
		t.Fatal(err)
	}
	m := NewMap()
	// Scope err to the check; the redundant "_ = m" (m is already used
	// above) has been removed.
	if err := m.DecodeText(buf); err != nil {
		t.Fatal(err)
	}
}

100
crushmap/text_tunable.go Normal file
View File

@@ -0,0 +1,100 @@
package crushmap
import (
"errors"
"fmt"
"strconv"
)
// tunableState starts a "tunable <key> <value>" line; the keyword must
// be followed by a space.
func tunableState(l *lex) stateFn {
	l.lexIgnore()
	if r := l.lexPeek(); r != ' ' {
		l.lexErr(fmt.Sprintf("unexpected token %q", r))
		return l.lexPop()
	}
	l.lexEmit(itemTunableBeg)
	return tunableKeyState
}

// tunableKeyState lexes the tunable name; a newline or comment before
// the name is malformed.
func tunableKeyState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case '\n', '#':
			l.lexErr(fmt.Sprintf("unexpected token %q", r))
			return l.lexPop()
		case ' ':
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemTunableKey)
	return tunableValState
}

// tunableValState lexes the tunable value and closes the tunable.
func tunableValState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
loop:
	for {
		r := l.lexPeek()
		switch r {
		case '\n', '#', ' ':
			break loop
		default:
			l.lexNext()
		}
	}
	l.lexEmit(itemTunableVal)
	l.lexEmit(itemTunableEnd)
	return l.lexPop()
}
// handleTunable consumes the tokens of one "tunable <key> <value>"
// line. The value is returned as an int when it parses as one,
// otherwise as the raw string. A missing key is an error.
func (p *textParser) handleTunable() (string, interface{}, error) {
	var (
		key string
		val interface{}
	)
parse:
	for {
		tok, fin := p.l.lexNextToken()
		if fin {
			break parse
		}
		switch tok.itype {
		case itemEOF, itemTunableEnd:
			break parse
		case itemComment:
			// nothing to record
		case itemTunableKey:
			key = tok.ivalue
		case itemTunableVal:
			if n, err := strconv.Atoi(tok.ivalue); err == nil {
				val = n
			} else {
				val = tok.ivalue
			}
		}
	}
	if key == "" {
		return "", nil, errors.New("invalid tunable")
	}
	return key, val, nil
}

75
crushmap/text_type.go Normal file
View File

@@ -0,0 +1,75 @@
package crushmap
import (
"errors"
"strconv"
)
// typeState starts a "type <id> <name>" line.
func typeState(l *lex) stateFn {
	l.lexEmit(itemTypeBeg)
	return typeIDState
}

// typeIDState lexes the numeric type id.
func typeIDState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	for {
		r := l.lexPeek()
		if r == '\n' || r == ' ' || r == '#' {
			break
		}
		l.lexNext()
	}
	l.lexEmit(itemTypeID)
	return typeNameState
}

// typeNameState lexes the type name and closes the type declaration.
func typeNameState(l *lex) stateFn {
	l.lexTake(" \t")
	l.lexIgnore()
	for {
		r := l.lexPeek()
		if r == '\n' || r == ' ' || r == '#' {
			break
		}
		l.lexNext()
	}
	l.lexEmit(itemTypeName)
	l.lexEmit(itemTypeEnd)
	return l.lexPop()
}
// handleType consumes the tokens of one "type <id> <name>" line and
// returns the Type. The ID starts at -1 until an itemTypeID token is
// seen; a type without a name is rejected.
func (p *textParser) handleType() (*Type, error) {
	typ := &Type{ID: -1}
parse:
	for {
		tok, fin := p.l.lexNextToken()
		if fin {
			break parse
		}
		switch tok.itype {
		case itemEOF, itemTypeEnd:
			break parse
		case itemComment:
			// nothing to record
		case itemTypeID:
			n, err := strconv.Atoi(tok.ivalue)
			if err != nil {
				return nil, err
			}
			typ.ID = int32(n)
		case itemTypeName:
			typ.Name = tok.ivalue
		}
	}
	if typ.Name == "" {
		return nil, errors.New("invalid type")
	}
	return typ, nil
}