diff --git a/run.sh b/run.sh
index b12bcd7..8800b1f 100644
--- a/run.sh
+++ b/run.sh
@@ -4,8 +4,10 @@
 cd /opt/go/src/mercury || exit
 
 rm build/*
 
-go build -o ./build/mercury ./src/mercury.go
+#go build -o ./build/mercury ./src/mercury.go
+go build -o ./build/taxes ./src/taxes.go
 
-mercury_qb_path="/home/dtookey/work/clarity-reporting/qb/" DB_CREDS="$(cat /home/dtookey/work/datastudio-db-creds.txt)" DB_HOST=data-connect.carolina.engineering ./build/mercury
+#mercury_qb_path="/home/dtookey/work/clarity-reporting/qb/" DB_CREDS="$(cat /home/dtookey/work/datastudio-db-creds.txt)" DB_HOST=data-connect.carolina.engineering ./build/mercury
+mercury_qb_path="/home/dtookey/work/clarity-reporting/qb/" DB_CREDS="$(cat /home/dtookey/work/datastudio-db-creds.txt)" DB_HOST=data-connect.carolina.engineering ./build/taxes
 
diff --git a/src/db/database-primitives.go b/src/db/database-primitives.go
index 42325e8..559a042 100644
--- a/src/db/database-primitives.go
+++ b/src/db/database-primitives.go
@@ -144,6 +144,7 @@ func createDbConnection(database string) *sql.DB {
 	host := os.Getenv(dbCredsHostName)
 	dbString := dsnTemplate
 	connectString := fmt.Sprintf(dbString, cred, host, database)
+	fmt.Printf("Beginning connection to database %s on host %s\n", database, host)
 
 	db, err := sql.Open("mysql", connectString)
 	if err != nil {
diff --git a/src/hr/timesheets.go b/src/hr/timesheets.go
new file mode 100644
index 0000000..9b17c37
--- /dev/null
+++ b/src/hr/timesheets.go
@@ -0,0 +1,337 @@
+package hr
+
+import (
+	"encoding/csv"
+	"fmt"
+	"log"
+	"mercury/src/db"
+	"os"
+	"path"
+	"regexp"
+	"strconv"
+	"strings"
+	"time"
+)
+
+type (
+	HourReport struct {
+		FilePath     string
+		Records      *[][]string
+		SkipFirstRow bool
+		ReportLines  *[]*HourReportLine
+	}
+
+	HourReportLoadTask struct {
+		Records *[][]string
+		Err     error
+	}
+
+	HourReportLine struct {
+		PayGroup    string
+		LName       string
+		FName       string
+		HomeDept    string
+		ManagerName string
+		WeekEnding  string
+		EEId        int
+		Overtime    float64
+		Regular     float64
+		Sick        float64
+		Vacation    float64
+		Bereavement float64
+		Service     float64
+		Total       float64
+	}
+)
+
+// namePattern matches the legacy "M.D.YY-M.D.YY*.csv" report file names and
+// captures the week-ending date.
+var namePattern = regexp.MustCompile(`^\d{1,2}\.\d{1,2}\.\d{2}-(\d{1,2}\.\d{1,2}\.\d{2}).*csv$`)
+
+// LoadReports reads every CSV timesheet in the given directory into HourReports.
+func LoadReports(pathlikeBase string) *[]HourReport {
+	files, err := getAllFilesInDir(pathlikeBase)
+	reports := make([]HourReport, 0, 300)
+
+	if err != nil {
+		panic(err)
+	}
+	for _, file := range *files {
+		sReport := NewHourReport(file, false)
+		reports = append(reports, *sReport)
+	}
+
+	return &reports
+}
+
+// UpdateTimesheetReport rebuilds the hr_timesheet_report table from the CSVs under pathlike.
+func UpdateTimesheetReport(pathlike string) {
+	connector := &db.ConnectorGeneric{}
+
+	reports := LoadReports(pathlike)
+
+	tableWipe := db.NewRunner("create-mercury-hrTimesheets-table.sql", db.MercuryDatabaseName)
+	connector.ExecuteSqlScript(tableWipe)
+	for _, hourReport := range *reports {
+		log.Printf("Updating database\n")
+		db.BlockUpdate[HourReportLine](connector, db.MercuryDatabaseName, "update-mercury-hrTimesheets.sql", hourReport.ReportLines)
+		log.Printf("Updates finished.\n")
+	}
+
+	tablePrune := db.NewRunner("update-mercury-hrTimesheetsCleanup.sql", db.MercuryDatabaseName)
+	connector.ExecuteSqlScript(tablePrune)
+}
+
+// NewHourReport loads a single CSV timesheet and parses it into report lines.
+func NewHourReport(pathlike string, skipFirstRow bool) *HourReport {
+	report := HourReport{
+		FilePath:     pathlike,
+		Records:      nil,
+		SkipFirstRow: skipFirstRow,
+	}
+
+	asyncChan := make(chan *HourReportLoadTask)
+
+	go loadTimeSheet(report.FilePath, asyncChan)
+	recordStatus := <-asyncChan
+	if recordStatus.Err != nil {
+		fmt.Printf("Error in the following file: %s\n", report.FilePath)
+		panic(recordStatus.Err)
+	}
+
+	report.Records = recordStatus.Records
+
+	report.ReportLines = processReportToLines(report)
+
+	return &report
+}
+
+func processReportToLines(report HourReport) *[]*HourReportLine {
+	lines := make([]*HourReportLine, 0, 250)
+	localTable := *report.Records
+	headersRaw := localTable[0]
+	headers := make([]string, len(headersRaw))
+	for i, v := range headersRaw {
+		// Trim whitespace and any UTF-8 BOM left on the first header.
+		key := strings.Trim(v, " \t\uFEFF")
+		headers[i] = key
+	}
+
+	for i := 1; i < len(localTable); i++ {
+		row := localTable[i]
+		line := newHourReportLineFromRow(headers, row)
+		line.WeekEnding = fileNameToSQLDate(report.FilePath)
+		lines = append(lines, &line)
+	}
+
+	return &lines
+}
+
+// fileNameToSQLDate converts a "YYYYMMDD_..." file name into a YYYY-MM-DD date string.
+func fileNameToSQLDate(fileName string) string {
+	name := path.Base(fileName)
+	parts := strings.Split(name, "_")
+	year := parts[0][:4]
+	month := parts[0][4:6]
+	date := parts[0][6:8]
+	return fmt.Sprintf("%s-%s-%s", year, month, date)
+}
+
+// I hate that this has a ton of hard-coded stuff. I'm not sure if there's a way around it though.
+func newHourReportLineFromRow(headers []string, row []string) HourReportLine {
+	line := HourReportLine{}
+	if len(headers) != len(row) {
+		panic("header array and row array are different sizes in newHourReportLineFromRow")
+	}
+	//"Paygroup", "Last Name", "First Name", "Home Department", "Manager Name", "Worked DeptName", "OT", "Reg", "Sick", "Vac", "Total"
+
+	for i, header := range headers {
+		strVal := row[i]
+		switch header {
+		case "Paygroup":
+			line.PayGroup = strVal
+		case "Last Name":
+			line.LName = strVal
+		case "First Name":
+			line.FName = strVal
+		case "Home Department":
+			line.HomeDept = strVal
+		case "Manager Name":
+			line.ManagerName = strVal
+		case "Worked DeptName":
+			continue
+		case "OT":
+			v, err := strconv.ParseFloat(strVal, 64)
+			if err != nil {
+				v = 0
+			}
+			line.Overtime = v
+		case "Reg":
+			v, err := strconv.ParseFloat(strVal, 64)
+			if err != nil {
+				v = 0
+			}
+			line.Regular = v
+		case "Sick":
+			v, err := strconv.ParseFloat(strVal, 64)
+			if err != nil {
+				v = 0
+			}
+			line.Sick = v
+		case "Vac":
+			v, err := strconv.ParseFloat(strVal, 64)
+			if err != nil {
+				v = 0
+			}
+			line.Vacation = v
+		case "Total":
+			v, err := strconv.ParseFloat(strVal, 64)
+			if err != nil {
+				v = 0
+			}
+			line.Total = v
+		}
+	}
+
+	return line
+}
+
+// loadTimeSheet reads a CSV file and sends the parsed records (or an error) on asyncChan.
+func loadTimeSheet(pathlike string, asyncChan chan<- *HourReportLoadTask) {
+	f, err := os.OpenFile(pathlike, os.O_RDONLY, 0755)
+	if err != nil {
+		asyncChan <- &HourReportLoadTask{nil, err}
+		close(asyncChan)
+		return
+	}
+	defer f.Close()
+
+	reader := csv.NewReader(f)
+	records, err := reader.ReadAll()
+	if err != nil {
+		asyncChan <- &HourReportLoadTask{nil, err}
+		close(asyncChan)
+		return
+	}
+
+	asyncChan <- &HourReportLoadTask{&records, nil}
+	close(asyncChan)
+}
+
+// ToQueryBlock renders the line as a VALUES tuple for update-mercury-hrTimesheets.sql.
+func (line HourReportLine) ToQueryBlock() string {
+	return fmt.Sprintf(
+		"('%s','%s','%s','%s','%s','%s','%d','%f','%f','%f','%f','%f','%f','%f')",
+		line.PayGroup,
+		line.LName,
+		line.FName,
+		line.HomeDept,
+		line.ManagerName,
+		line.WeekEnding,
+		line.EEId,
+		line.Overtime,
+		line.Regular,
+		line.Sick,
+		line.Vacation,
+		line.Bereavement,
+		line.Service,
+		line.Total,
+	)
+}
+
+// Deprecated: one-off helper that normalized the legacy report file names.
+func rename(report HourReport) {
+	fileName := path.Base(report.FilePath)
+	if namePattern.MatchString(fileName) {
+		idx := namePattern.FindAllStringSubmatch(fileName, -1)
+		parts := strings.Split(idx[0][1], ".")
+		year, err := strconv.Atoi(parts[2])
+		if err != nil {
+			panic(err)
+		}
+		month, err := strconv.Atoi(parts[0])
+		if err != nil {
+			panic(err)
+		}
+		date, err := strconv.Atoi(parts[1])
+		if err != nil {
+			panic(err)
+		}
+
+		full := fmt.Sprintf("20%02d%02d%02d", year, month, date)
+		tStamp := time.Date(2000+year, getMonth(month), date, 10, 0, 0, 0, time.UTC)
+		_, week := tStamp.ISOWeek()
+		fileName := fmt.Sprintf("%s_Paycor_W%d.csv", full, week)
+
+		err = copyFile(report.FilePath, fileName)
+		if err != nil {
+			panic(err)
+		}
+	} else {
+		fmt.Printf("Couldn't find match for %s\n", fileName)
+	}
+}
+
+func getMonth(month int) time.Month {
+	switch month {
+	case 1:
+		return time.January
+	case 2:
+		return time.February
+	case 3:
+		return time.March
+	case 4:
+		return time.April
+	case 5:
+		return time.May
+	case 6:
+		return time.June
+	case 7:
+		return time.July
+	case 8:
+		return time.August
+	case 9:
+		return time.September
+	case 10:
+		return time.October
+	case 11:
+		return time.November
+	case 12:
+		return time.December
+	default:
+		return time.January
+	}
+}
+
+// getAllFilesInDir returns the paths of all CSV files directly under pathlikeBase.
+func getAllFilesInDir(pathlikeBase string) (*[]string, error) {
+	listing, err := os.ReadDir(pathlikeBase)
+	res := make([]string, 0, 300)
+	if err != nil {
+		return nil, err
+	}
+	for _, list := range listing {
+		if list.IsDir() || path.Ext(list.Name()) != ".csv" {
+			fmt.Printf("Skipping: %s\n", list.Name())
+			continue
+		}
+		res = append(res, path.Join(pathlikeBase, list.Name()))
+	}
+	return &res, nil
+}
+
+// copyFile copies inPath into the hard-coded pcorrect output directory as outpath.
+func copyFile(inPath string, outpath string) error {
+	outPathBase := "/home/dtookey/work/clarity-reporting/pcorrect"
+	outFinal := path.Join(outPathBase, outpath)
+	b, err := os.ReadFile(inPath)
+	if err != nil {
+		return err
+	}
+	err = os.WriteFile(outFinal, b, 0755)
+	if err != nil {
+		return err
+	}
+	return nil
+}
diff --git a/src/mercury.go b/src/mercury.go
index 30bd94f..302bb2b 100644
--- a/src/mercury.go
+++ b/src/mercury.go
@@ -4,6 +4,7 @@ import (
 	"log"
 	"mercury/src/db"
 	"mercury/src/finance"
+	"mercury/src/hr"
 	"mercury/src/mercury"
 	"os"
 	"time"
@@ -12,10 +13,13 @@ import (
 func main() {
 	s := time.Now()
 
+	// local/update run
+	//updateTelecom()
 	//processQBARReport()
-	updateInsightData()
-	//updateTelecom()
 
+	// regular run
+	//updateInsightData()
+	updateHR()
 
 	f := time.Now()
 	log.Println(f.Sub(s).Milliseconds())
@@ -29,6 +33,10 @@ func updateTelecom() {
 	icx.UpdateVerizonReports()
 }
 
+func updateHR() {
+	hr.UpdateTimesheetReport("/home/dtookey/work/clarity-reporting/paycor")
+}
+
 func updateInsightData() {
 	icx := mercury.NewInterconnect()
 	icx.Init()
diff --git a/src/mercury/Interconnect.go b/src/mercury/Interconnect.go
index 9cdbb44..471de9e 100644
--- a/src/mercury/Interconnect.go
+++ b/src/mercury/Interconnect.go
@@ -24,7 +24,7 @@ func NewInterconnect() *Interconnect {
 	return &connect
 }
 
-// PseudoInit This is a stupid idea in order to fix the fact that we obliterate tables left-and righta t processing time
+// PseudoInit This is a stupid idea in order to fix the fact that we obliterate tables left and right at processing time
 func (ic *Interconnect) PseudoInit() {
 	ic.InsightDBConnector = projectInsight.NewDBConnection()
 }
diff --git a/src/projectClarity/clarity-database.go b/src/projectClarity/clarity-database.go
index 8daf2a0..b3179c3 100644
--- a/src/projectClarity/clarity-database.go
+++ b/src/projectClarity/clarity-database.go
@@ -19,6 +19,12 @@ type (
 		NewValue string
 		Modifier int32
 	}
+
+	ClarityProjectBilling struct {
+		Refnum   string
+		Location string
+		Fee      float64
+	}
 )
 
 //
diff --git a/src/sql/create-mercury-hrTimesheets-table.sql b/src/sql/create-mercury-hrTimesheets-table.sql
new file mode 100644
index 0000000..d34be50
--- /dev/null
+++ b/src/sql/create-mercury-hrTimesheets-table.sql
@@ -0,0 +1,19 @@
+DROP TABLE IF EXISTS mercury.hr_timesheet_report;
+
+CREATE TABLE mercury.hr_timesheet_report
+(
+    PayGroup    VARCHAR(150),
+    LName       VARCHAR(150),
+    FName       VARCHAR(150),
+    HomeDept    VARCHAR(150),
+    ManagerName VARCHAR(150),
+    WeekEnding  VARCHAR(150),
+    EEId        INT,
+    Overtime    REAL,
+    Regular     REAL,
+    Sick        REAL,
+    Vacation    REAL,
+    Bereavement REAL,
+    Service     REAL,
+    Total       REAL
+);
\ No newline at end of file
diff --git a/src/sql/read-clarity-billingProjects.sql b/src/sql/read-clarity-billingProjects.sql
new file mode 100644
index 0000000..f94095d
--- /dev/null
+++ b/src/sql/read-clarity-billingProjects.sql
@@ -0,0 +1,3 @@
+SELECT refnum, location, IF(price_override = 1, override_price, default_price) AS fee
+FROM projects.all_projects
+         INNER JOIN billing b ON all_projects.refnum = b.refNumber;
\ No newline at end of file
diff --git a/src/sql/update-mercury-hrTimesheets.sql b/src/sql/update-mercury-hrTimesheets.sql
new file mode 100644
index 0000000..df971b2
--- /dev/null
+++ b/src/sql/update-mercury-hrTimesheets.sql
@@ -0,0 +1,3 @@
+INSERT INTO mercury.hr_timesheet_report (PayGroup, LName, FName, HomeDept, ManagerName, WeekEnding, EEId, Overtime, Regular,
+                                         Sick, Vacation, Bereavement, Service, Total)
+VALUES %s;
\ No newline at end of file
diff --git a/src/sql/update-mercury-hrTimesheetsCleanup.sql b/src/sql/update-mercury-hrTimesheetsCleanup.sql
new file mode 100644
index 0000000..964ceb0
--- /dev/null
+++ b/src/sql/update-mercury-hrTimesheetsCleanup.sql
@@ -0,0 +1 @@
+DELETE FROM mercury.hr_timesheet_report WHERE FName = '' AND LName = '';
\ No newline at end of file
diff --git a/src/taxes.go b/src/taxes.go
new file mode 100644
index 0000000..1fc9581
--- /dev/null
+++ b/src/taxes.go
@@ -0,0 +1,114 @@
+package main
+
+import (
+	"database/sql"
+	"encoding/csv"
+	"fmt"
+	"log"
+	"mercury/src/db"
+	"mercury/src/mercury"
+	"mercury/src/projectClarity"
+	"os"
+	"regexp"
+	"strconv"
+)
+
+var (
+	// projectPattern matches Clarity reference numbers: three letters followed by seven digits.
+	projectPattern = regexp.MustCompile(`^[A-Za-z]{3}\d{7}$`)
+	// locationPattern captures the trailing two-letter state code in "City, NC"-style locations.
+	locationPattern = regexp.MustCompile(`.*, ([NSC]{2})`)
+)
+
+func main() {
+	csvPath := "/home/dtookey/work/clarity-reporting/billing2021.CSV"
+	outPath := "/home/dtookey/work/clarity-reporting/extended-billing2021.csv"
+	records := open(csvPath)
+	projectMap := getClarityData()
+	augmentedData := augmentData(records, *projectMap)
+	write(augmentedData, outPath)
+}
+
+// getClarityData loads all billing projects from Clarity, keyed by reference number.
+func getClarityData() *map[string]projectClarity.ClarityProjectBilling {
+	cdb := mercury.NewInterconnect()
+	cdb.PseudoInit()
+	cb := func(rows *sql.Rows) *projectClarity.ClarityProjectBilling {
+		container := projectClarity.ClarityProjectBilling{}
+		err := rows.Scan(&container.Refnum, &container.Location, &container.Fee)
+		if err != nil {
+			log.Panicln(err)
+		}
+		return &container
+	}
+	rMap := make(map[string]projectClarity.ClarityProjectBilling)
+	projects := db.QueryForObjects[projectClarity.ClarityProjectBilling](cdb.InsightDBConnector.ConnectorGeneric, db.ClarityDatabaseName, "read-clarity-billingProjects.sql", cb)
+	for _, project := range *projects {
+		rMap[project.Refnum] = *project
+	}
+
+	return &rMap
+}
+
+// augmentData appends location, fee, and state columns to each billing row that matches a Clarity project.
+func augmentData(data *[][]string, clarityData map[string]projectClarity.ClarityProjectBilling) *[][]string {
+	results := make([][]string, 0, 10000)
+	longest := ""
+	for _, row := range *data {
+		refNum := row[3]
+		nRow := row
+		if projectPattern.MatchString(refNum) {
+			project := clarityData[refNum]
+			deciString := strconv.FormatFloat(project.Fee, 'f', 2, 64)
+			if len(project.Location) > len(longest) {
+				longest = project.Location
+			}
+
+			nRow = append(nRow, project.Location)
+			nRow = append(nRow, deciString)
+			b := []byte(project.Location)
+			if locationPattern.Match(b) {
+				idx := locationPattern.FindAllSubmatchIndex(b, -1)[0]
+				fmt.Printf("%#v\n", idx)
+				state := string(b[idx[2]:idx[3]])
+				fmt.Println(state)
+				nRow = append(nRow, state)
+			} else {
+				nRow = append(nRow, "")
+			}
+		} else {
+			nRow = append(nRow, "")
+			nRow = append(nRow, "")
+			nRow = append(nRow, "")
+		}
+		results = append(results, nRow)
+	}
+	fmt.Println(longest)
+
+	return &results
+}
+
+// write serializes the augmented rows back out as CSV.
+func write(data *[][]string, pathlike string) {
+	f, err := os.Create(pathlike)
+	if err != nil {
+		panic(err)
+	}
+	defer f.Close()
+	writer := csv.NewWriter(f)
+	for _, row := range *data {
+		err := writer.Write(row)
+		if err != nil {
+			panic(err)
+		}
+	}
+	writer.Flush()
+}
+
+// open reads the whole CSV at pathlike into memory.
+func open(pathlike string) *[][]string {
+	f, err := os.OpenFile(pathlike, os.O_RDONLY, 0755)
+	if err != nil {
+		panic(err)
+	}
+	defer f.Close()
+	reader := csv.NewReader(f)
+	records, err := reader.ReadAll()
+	if err != nil {
+		panic(err)
+	}
+	return &records
+}
diff --git a/src/test.go b/src/test.go
index 7fccb73..67342d1 100644
--- a/src/test.go
+++ b/src/test.go
@@ -1,12 +1,9 @@
 package main
 
-import (
-	"mercury/src/finance"
-	"mercury/src/mercury"
-)
+import "mercury/src/hr"
 
 func main() {
-	ic := mercury.NewInterconnect()
-	ic.ResetTables()
-	finance.GenerateArAgingReport("/home/dtookey/work/clarity-reporting/qb/ar/")
+	reportBase := "/home/dtookey/work/clarity-reporting/paycor/"
+	hr.LoadReports(reportBase)
+
 }