Added some status Printlns.

Added parametric header skipping in processTrialBalance in csv.go.
Began earnest work on a sustainable Verizon pipeline.
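
A minimal sketch of the new call pattern for the parametric header skipping (taken from the csv.go hunks below); `p` here is an assumed *string pointing at a trial balance CSV, not a variable from the diff:

    lines := processTrialBalance(p, true)  // skip the first row, as ProcessTrialBalances now does
    raw := processTrialBalance(p, false)   // keep every row, e.g. for an export without a header row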
master
dtookey 4 years ago
parent 791ca84414
commit 6fd8e8bd39

@ -65,12 +65,13 @@ func GenerateArAgingReport(pathlike string) {
res = append(res, part)
}
}
log.Println("Updating database")
db.BulkUpdate[arReportInputLine](connector, "mercury", "update-mercury-arReport.sql", &res, arReportInputLineMappingFunction)
log.Println("Updates finished.")
}
func processArReport(pathlike string) *[]*arReportInputLine {
log.Printf("Processing AR Report for: %s\n", pathlike)
res := make([]*arReportInputLine, 0, 500)
contents := readCsv(pathlike)
for _, row := range *contents {

@ -50,7 +50,7 @@ func ProcessTrialBalances(pathlikeIn string, pathlikeOut string) {
paths := enumFiles(pathlikeIn)
table := make([]*[]*TrialBalanceLine, 0, 100)
for _, p := range *paths {
table = append(table, processTrialBalance(p))
table = append(table, processTrialBalance(p, true))
}
writeTable(&table, pathlikeOut)
}
@ -76,7 +76,7 @@ func formatDate(period string) string {
return fmt.Sprintf("20%s-%s-%s", year, month, date)
}
func processTrialBalance(pathlike *string) *[]*TrialBalanceLine {
func processTrialBalance(pathlike *string, skipHeaders bool) *[]*TrialBalanceLine {
ret := make([]*TrialBalanceLine, 0, 50)
file, err := os.OpenFile(*pathlike, os.O_RDONLY, 0755)
if err != nil {
@ -90,10 +90,19 @@ func processTrialBalance(pathlike *string) *[]*TrialBalanceLine {
}
period := getDateFromFileName(pathlike)
skipped := false
buff := make([]rune, 500)
for _, row := range table {
ret = append(ret, rowBufferToBalanceLine(&row, &buff, period))
if !skipped && skipHeaders {
skipped = true
continue
}
rowBuffer, err := rowBufferToBalanceLine(&row, &buff, period)
if err != nil {
log.Printf("error found in %s\n", *pathlike)
panic(err)
}
ret = append(ret, rowBuffer)
}
return &ret
}
@ -104,25 +113,32 @@ func getDateFromFileName(pathlike *string) string {
return date
}
func getAccountTypeFromName(accountName string) string {
func getAccountTypeFromName(accountName string) (string, error) {
parts := strings.Split(accountName, " ")
if parts[0] != "" {
}
number, err := strconv.Atoi(parts[0])
if err != nil {
log.Panic(err)
return "", err
}
if number < expenseAccountCutoff {
return "income"
return "income", nil
} else {
return "expense"
return "expense", nil
}
}
func rowBufferToBalanceLine(row *[]string, buffer *[]rune, date string) *TrialBalanceLine {
func rowBufferToBalanceLine(row *[]string, buffer *[]rune, date string) (*TrialBalanceLine, error) {
if strings.Index((*row)[0], ":") > -1 {
return revenueRowBufferToBalanceLine(row, buffer, date)
return revenueRowBufferToBalanceLine(row, buffer, date), nil
} else {
return expenseRowBufferToBalanceLine(row, buffer, date)
row, err := expenseRowBufferToBalanceLine(row, buffer, date)
if err != nil {
return nil, err
}
return row, nil
}
}
@ -148,11 +164,14 @@ func revenueRowBufferToBalanceLine(row *[]string, buffer *[]rune, date string) *
balance.AccountName = target
balance.Amount = amount
balance.Period = formatDate(date)
balance.AccountType = getAccountTypeFromName(target)
balance.AccountType, err = getAccountTypeFromName(target)
if err != nil {
panic(err)
}
return &balance
}
func expenseRowBufferToBalanceLine(row *[]string, buffer *[]rune, date string) *TrialBalanceLine {
func expenseRowBufferToBalanceLine(row *[]string, buffer *[]rune, date string) (*TrialBalanceLine, error) {
balance := TrialBalanceLine{}
target := (*row)[0]
dotIdx := strings.IndexRune(target, DotRune)
@ -180,8 +199,11 @@ func expenseRowBufferToBalanceLine(row *[]string, buffer *[]rune, date string) *
balance.AccountName = target
balance.Amount = amount
balance.Period = formatDate(date)
balance.AccountType = getAccountTypeFromName(target)
return &balance
balance.AccountType, err = getAccountTypeFromName(target)
if err != nil {
return nil, err
}
return &balance, nil
}
func ProcessCSVsFromPath(pathlikeIn string, pathlikeOut string) {

@ -1,10 +1,11 @@
package main
import (
"fmt"
"log"
"mercury/src/finance"
"mercury/src/mercury"
"mercury/src/projectClarity"
"mercury/src/telecom"
"os"
"time"
)
@ -12,10 +13,10 @@ import (
func main() {
s := time.Now()
//processQbBilling()
updateInsightData()
//processQBARReport()
//updateInsightData()
//test()
test()
f := time.Now()
log.Println(f.Sub(s).Milliseconds())
@ -23,11 +24,17 @@ func main() {
}
func test() {
db := projectClarity.NewClarityDatabase()
events := db.GetLifecycleEvents()
for _, event := range *events {
log.Printf("%#v\n", *event)
}
csv := telecom.NewVerizonCSV("/home/dtookey/work/clarity-reporting/telecom/call-log-6-7-2022.csv")
results := telecom.ProcessVerizonCsvToObjects(csv, telecom.RowToVerizonCallLine, func(row []string) bool {
if row[5] == "Voice" {
return true
} else {
return false
}
})
fmt.Println(len(*results))
}
func updateInsightData() {
@ -35,15 +42,15 @@ func updateInsightData() {
icx.Init()
icx.UpdateUsers()
icx.UpdateTimeEntries()
//finance.GenerateArAgingReport("/home/dtookey/work/clarity-reporting/qb/ar/")
finance.GenerateArAgingReport("/home/dtookey/work/clarity-reporting/qb/ar/")
}
func processQbBilling() {
func processQBARReport() {
reportBase := os.Getenv("mercury_qb_path")
log.Printf("Searching for documents in %s\n", reportBase)
if len(reportBase) == 0 {
log.Fatalln("please set the mercury_qb_path env var. we don't know where to look otherwise")
}
finance.ProcessTrialBalances(reportBase, "./updateInsightData.csv")
finance.GenerateArAgingReport(reportBase)
//finance.ProcessTrialBalances(reportBase, "./updateInsightData.csv")
}

@ -0,0 +1,125 @@
package telecom
import (
"bytes"
"encoding/csv"
"io/ioutil"
"os"
"strconv"
)
/*
First step is building the reports on verizon's website. Below are the columns (and hopefully categories) that you need
to completely recreate the report.
Wireless number [mandatory]
"User name" [Contact Information]
Date [Voice Usage]
Minutes [Voice Usage]
Number [Voice Usage]
The report comes as a comma/LF formatted CSV. It has 13 lines of useless header data and 1 line of useless total data
that must be trimmed off to shove everything through a struct factory.
*/
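// Illustrative only (hypothetical values, not taken from a real report): after the header and
// footer are trimmed, each remaining record is expected to map positionally onto VerizonCallLine,
// per RowToVerizonCallLine below, e.g.
//   5555550100,Example User,06/01/2022,12,5555550199,Voice
//   -> WirelessNumber, UserName, CallDate, CallMinutes (parsed to int), OtherNumber, UsageCategory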
type (
VerizonCSV struct {
trimmed bool
rawData *[]byte
TabularData *bytes.Buffer
}
VerizonCallLine struct {
WirelessNumber string
UserName string
CallDate string
CallMinutes int
OtherNumber string
UsageCategory string
}
)
func RowToVerizonCallLine(row []string) *VerizonCallLine {
minutes, err := strconv.Atoi(row[3])
if err != nil {
panic(err)
}
line := VerizonCallLine{row[0], row[1], row[2], minutes, row[4], row[5]}
return &line
}
func NewVerizonCSV(pathlike string) *VerizonCSV {
csv := VerizonCSV{trimmed: false}
file, err := os.OpenFile(pathlike, os.O_RDONLY, 777)
if err != nil {
panic(err)
}
defer file.Close()
payload, err := ioutil.ReadAll(file)
if err != nil {
panic(err)
}
csv.rawData = &payload
(&csv).trimHeadersAndFooters()
return &csv
}
func (v *VerizonCSV) trimHeadersAndFooters() {
if v.trimmed {
return
}
data := *v.rawData
lineCount := 0
headerIdx := -1
footerIdx := -1
for i, c := range data {
if c == '\n' {
lineCount += 1
if lineCount == 13 {
headerIdx = i + 1
break
}
}
}
for i := len(data) - 1; i >= 0; i-- {
c := data[i]
if c == '\n' {
footerIdx = i + 1
break
}
}
tabData := bytes.NewBuffer(data[headerIdx:footerIdx])
v.TabularData = tabData
v.trimmed = true
}
func ProcessVerizonCsvToObjects[K any](v *VerizonCSV, mappingFunction func([]string) *K, filterFunction func([]string) bool) *[]*K {
ret := make([]*K, 0, 1000)
doc := csv.NewReader(v.TabularData)
var filter func([]string) bool
if filterFunction == nil {
filter = func([]string) bool { return true }
} else {
filter = filterFunction
}
//this is going to read the entire thing line by line until it hits a parser error or the io.EOF error at the end of the buffer.
//we did it like this because Verizon reports love to append jagged rows every so often, which makes the parser freak out.
//all the stuff we care about is in the top of the document so far, so this works for now. -dtookey 6/7/2022
for record, err := doc.Read(); err == nil; record, err = doc.Read() {
if filter(record) {
obj := mappingFunction(record)
ret = append(ret, obj)
}
}
return &ret
}