Add aggregate option (#7)
* Add csv option (default: json)

* Add contribution type

* Output page author name if contrib file exists

* Format
kondoumh authored Nov 21, 2020
1 parent f7787f3 commit b1d2376
Showing 3 changed files with 65 additions and 25 deletions.
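
In short, with this change sbf aggregate writes the per-user summary to '<WorkDir>/<project name>_contrib.json' by default, sbf aggregate --csv (short form -s) writes '<WorkDir>/<project name>_contrib.csv' instead, and a later sbf fetch reuses the JSON file, when it exists, to fill in page author names (file names as implemented in writeContrib and readContrib below).
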
55 changes: 30 additions & 25 deletions cmd/aggregate.go
@@ -1,9 +1,12 @@
package cmd

import (
	"encoding/json"
	"fmt"
	"os"

	"github.com/mamezou-tech/sbgraph/pkg/file"

	"github.com/cheggaaa/pb/v3"
	"github.com/mamezou-tech/sbgraph/pkg/types"
	"github.com/spf13/cobra"
@@ -18,34 +21,28 @@ var aggregateCmd = &cobra.Command{
sbf aggregate
CSV will be created at '<WorkDir>/<project name>.csv'.
JSON will be created as '<WorkDir>/<project name>_ag.json'.
If the csv flag is specified, CSV will be created as '<WorkDir>/<project name>_ag.csv'.
`),
	Run: func(cmd *cobra.Command, args []string) {
		doAggregate(cmd)
	},
}

func init() {
	aggregateCmd.PersistentFlags().BoolP("csv", "s", false, "Output as CSV")
	rootCmd.AddCommand(aggregateCmd)
}

type contribute struct {
	UserID            string
	UserName          string
	PagesCreated      int
	PagesContributed  int
	ViewsCreatedPages int
	LinksCreatedPages int
}

func doAggregate(cmd *cobra.Command) {
	csv, _ := cmd.PersistentFlags().GetBool("csv")
	projectName := config.CurrentProject
	CheckProject(projectName)
	fmt.Printf("Aggregate project : %s\n", projectName)
	var proj types.Project
	err := proj.ReadFrom(projectName, config.WorkDir)
	CheckErr(err)
	contrib := map[string]contribute{}
	contrib := map[string]types.Contribution{}
	bar := pb.StartNew(proj.Count)
	for _, idx := range proj.Pages {
		var page types.Page
@@ -58,7 +55,7 @@ func doAggregate(cmd *cobra.Command) {
			p.LinksCreatedPages += page.Linked
			contrib[page.Author.ID] = p
		} else {
			c := contribute{
			c := types.Contribution{
				UserID:           page.Author.ID,
				UserName:         page.Author.DisplayName,
				PagesContributed: 1,
@@ -73,7 +70,7 @@ func doAggregate(cmd *cobra.Command) {
			p.PagesContributed++
			contrib[user.ID] = p
		} else {
			c := contribute{
			c := types.Contribution{
				UserID:           user.ID,
				UserName:         user.DisplayName,
				PagesContributed: 1,
@@ -84,24 +81,32 @@ func doAggregate(cmd *cobra.Command) {
		bar.Increment()
	}
	bar.Finish()
	err = writeContrib(projectName, contrib)
	err = writeContrib(projectName, contrib, csv)
	CheckErr(err)
}

func writeContrib(projectName string, contrib map[string]contribute) error {
	path := fmt.Sprintf("%s/%s.csv", config.WorkDir, projectName)
	file, err := os.Create(path)
	if err != nil {
		return err
	}
	defer file.Close()
	file.Write(([]byte)("User Name,Pages Created,Pages Contributed,Views of Created Pages,Links of Created Pages\n"))
	for _, v := range contrib {
		data := fmt.Sprintf("%s,%d,%d,%d,%d\n", v.UserName, v.PagesCreated, v.PagesContributed, v.ViewsCreatedPages, v.LinksCreatedPages)
		_, err = file.Write(([]byte)(data))
func writeContrib(projectName string, contrib map[string]types.Contribution, csv bool) error {
	if csv {
		path := fmt.Sprintf("%s/%s_contrib.csv", config.WorkDir, projectName)
		fmt.Println(path)
		file, err := os.Create(path)
		if err != nil {
			return err
		}
		defer file.Close()
		file.Write(([]byte)("User ID, User Name,Pages Created,Pages Contributed,Views of Created Pages,Links of Created Pages\n"))
		for _, v := range contrib {
			data := fmt.Sprintf("%s,%s,%d,%d,%d,%d\n", v.UserID, v.UserName, v.PagesCreated, v.PagesContributed, v.ViewsCreatedPages, v.LinksCreatedPages)
			_, err = file.Write(([]byte)(data))
			if err != nil {
				return err
			}
		}
	} else {
		data, _ := json.Marshal(contrib)
		if err := file.WriteBytes(data, projectName+"_contrib.json", config.WorkDir); err != nil {
			return err
		}
	}
	return nil
}
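
For reference, a minimal standalone sketch of the payload the new JSON branch of writeContrib produces: a map keyed by user ID whose values marshal with the camelCase tags of types.Contribution. The struct below is a local copy for illustration only, the sample user data is made up, and the output is pretty-printed here even though writeContrib uses plain json.Marshal.

package main

import (
	"encoding/json"
	"fmt"
)

// Local copy of types.Contribution, for illustration only.
type Contribution struct {
	UserID            string `json:"userId"`
	UserName          string `json:"userName"`
	PagesCreated      int    `json:"pagesCreated"`
	PagesContributed  int    `json:"pagesContributed"`
	ViewsCreatedPages int    `json:"viewsCreatedPages"`
	LinksCreatedPages int    `json:"linksCreatedPages"`
}

func main() {
	// Hypothetical aggregation result; real keys are user IDs.
	contrib := map[string]Contribution{
		"user-1": {UserID: "user-1", UserName: "alice", PagesCreated: 2, PagesContributed: 5, ViewsCreatedPages: 120, LinksCreatedPages: 7},
	}
	data, err := json.MarshalIndent(contrib, "", "  ")
	if err != nil {
		panic(err)
	}
	// Prints keys such as "userId", "userName", "pagesCreated":
	// the shape that readContrib in cmd/fetch.go reads back.
	fmt.Println(string(data))
}
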
25 changes: 25 additions & 0 deletions cmd/fetch.go
@@ -3,6 +3,7 @@ package cmd
import (
	"encoding/json"
	"fmt"
	"os"
	"sync"
	"time"

@@ -107,6 +108,17 @@ func fetchPageList(project types.Project) error {
			pages = append(pages, page)
		}
	}
	contrib, err := readContrib()
	if err != nil {
		return err
	}
	for idx, page := range pages {
		u, contains := contrib[page.Author.ID]
		if contains {
			pages[idx].Author.Name = u.UserName
			pages[idx].Author.DisplayName = u.UserName
		}
	}
	project.Pages = pages
	jst, _ := time.LoadLocation("Asia/Tokyo")
	project.Date = time.Now().In(jst).Format(timeLayout)
@@ -117,6 +129,19 @@ func fetchPageList(project types.Project) error {
	return nil
}

func readContrib() (map[string]types.Contribution, error) {
	contrib := map[string]types.Contribution{}
	bytes, err := file.ReadBytes(config.CurrentProject+"_contrib.json", config.WorkDir)
	if os.IsNotExist(err) {
		return contrib, nil
	}
	if err != nil {
		return contrib, err
	}
	err = json.Unmarshal(bytes, &contrib)
	return contrib, err
}

func dividePagesList(multiplicity int, projectName string) ([][]types.Page, error) {
	var divided [][]types.Page
	var proj types.Project
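
Conversely, a minimal sketch of the read side added in fetch.go above: decode a made-up <project name>_contrib.json payload and override a page author's display name, mirroring what readContrib plus the loop in fetchPageList do. The Contribution and Author types below are trimmed local stand-ins, not the real types package.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed stand-ins for the fields fetch.go actually touches.
type Contribution struct {
	UserID   string `json:"userId"`
	UserName string `json:"userName"`
}

type Author struct {
	ID          string
	Name        string
	DisplayName string
}

func main() {
	// Hypothetical contents of <WorkDir>/<project name>_contrib.json.
	payload := []byte(`{"user-1":{"userId":"user-1","userName":"alice"}}`)
	contrib := map[string]Contribution{}
	if err := json.Unmarshal(payload, &contrib); err != nil {
		panic(err)
	}
	author := Author{ID: "user-1", Name: "anonymous", DisplayName: "anonymous"}
	// Same override as fetchPageList: prefer the name recorded in the contrib file.
	if u, ok := contrib[author.ID]; ok {
		author.Name = u.UserName
		author.DisplayName = u.UserName
	}
	fmt.Println(author.DisplayName) // alice
}
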
10 changes: 10 additions & 0 deletions pkg/types/page.go
@@ -34,6 +34,16 @@ type User struct {
	PagesCreated []string
}

// Contribution represents summary of the user's contribution
type Contribution struct {
	UserID            string `json:"userId"`
	UserName          string `json:"userName"`
	PagesCreated      int    `json:"pagesCreated"`
	PagesContributed  int    `json:"pagesContributed"`
	ViewsCreatedPages int    `json:"viewsCreatedPages"`
	LinksCreatedPages int    `json:"linksCreatedPages"`
}

// ReadFrom will deserialize Project from file
func (page *Page) ReadFrom(projectName string, id string, workDir string) error {
	bytes, err := file.ReadBytes(id+".json", workDir+"/"+projectName)
