|
|
|
|
@ -1,8 +1,11 @@
|
|
|
|
|
package hr
|
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
"bytes"
|
|
|
|
|
"encoding/csv"
|
|
|
|
|
"fmt"
|
|
|
|
|
"geniuscartel.xyz/csvmagic"
|
|
|
|
|
"io/ioutil"
|
|
|
|
|
"log"
|
|
|
|
|
"mercury/src/db"
|
|
|
|
|
"mercury/src/mercuryUtil"
|
|
|
|
|
@ -15,12 +18,17 @@ import (
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
type (
|
|
|
|
|
HourReport struct {
|
|
|
|
|
HourReportLegacy struct {
|
|
|
|
|
FilePath string
|
|
|
|
|
Records *[][]string
|
|
|
|
|
SkipFirstRow bool
|
|
|
|
|
ReportLines *[]*HourReportLineLegacy
|
|
|
|
|
}
|
|
|
|
|
HourReport struct {
|
|
|
|
|
FilePath string
|
|
|
|
|
SkipFirstRow bool
|
|
|
|
|
ReportLines *[]*HourReportLine
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
EmployeeDirectory struct {
|
|
|
|
|
FilePath string
|
|
|
|
|
@ -56,14 +64,15 @@ type (
|
|
|
|
|
//
|
|
|
|
|
// Badge #,Brv,Hol,OT,Reg,Service,Sick,Total
|
|
|
|
|
HourReportLine struct {
|
|
|
|
|
EEid int
|
|
|
|
|
Bereavement float64
|
|
|
|
|
Holiday float64
|
|
|
|
|
Overtime float64
|
|
|
|
|
Regular float64
|
|
|
|
|
Service float64
|
|
|
|
|
Sick float64
|
|
|
|
|
Total float64
|
|
|
|
|
EEid int `csv:"Badge #"`
|
|
|
|
|
Bereavement float64 `csv:"Brv"`
|
|
|
|
|
Holiday float64 `csv:"Hol"`
|
|
|
|
|
Overtime float64 `csv:"OT"`
|
|
|
|
|
Regular float64 `csv:"Reg"`
|
|
|
|
|
Service float64 `csv:"Service"`
|
|
|
|
|
Sick float64 `csv:"Sick"`
|
|
|
|
|
Vacation float64 `csv:"Vac"`
|
|
|
|
|
Total float64 `csv:"Total"`
|
|
|
|
|
WeekEnding string
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@ -79,6 +88,13 @@ type (
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
func (h HourReportLine) ToQueryBlock() string {
|
|
|
|
|
//EEId, Bereavement, Holiday, Overtime, Regular,
|
|
|
|
|
// Service, Sick, Vacation, Total, week_of
|
|
|
|
|
return fmt.Sprintf("('%d',%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,'%s')",
|
|
|
|
|
h.EEid, h.Bereavement, h.Holiday, h.Overtime, h.Regular, h.Service, h.Sick, h.Vacation, h.Total, h.WeekEnding)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// namePattern matches report file names like "1.2.23-1.8.23 ... .csv",
// capturing the second (week-ending) date.
// NOTE(review): the separator "." is an unescaped regex dot, so it
// matches ANY character between the digit groups — presumably "\\." was
// intended; confirm before tightening.
var namePattern = regexp.MustCompile(`^\d{1,2}.\d{1,2}.\d{2}-(\d{1,2}.\d{1,2}.\d{2}).*csv$`)
|
|
|
|
|
|
|
|
|
|
func NewDirectoryReport(pathlike string, skipFirstRow bool) *EmployeeDirectory {
|
|
|
|
|
@ -113,6 +129,7 @@ func loadReports(pathlikeBase string) *[]HourReport {
|
|
|
|
|
panic(err)
|
|
|
|
|
}
|
|
|
|
|
for _, file := range *files {
|
|
|
|
|
sanitizeFile(file)
|
|
|
|
|
sReport := NewHourReport(file, false)
|
|
|
|
|
reports = append(reports, *sReport)
|
|
|
|
|
}
|
|
|
|
|
@ -120,16 +137,26 @@ func loadReports(pathlikeBase string) *[]HourReport {
|
|
|
|
|
return &reports
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// sanitizeFile rewrites the file at pathlike in place with every UTF-8
// byte-order mark (EF BB BF) removed. Paycor CSV exports carry a BOM
// before the header row, which breaks header matching downstream.
// Panics on any read/write failure, matching this file's error style.
func sanitizeFile(pathlike string) {
	b, err := ioutil.ReadFile(pathlike)
	if err != nil {
		panic(err)
	}
	// 239 187 191 == 0xEF 0xBB 0xBF, the UTF-8 BOM.
	rep := bytes.ReplaceAll(b, []byte{0xEF, 0xBB, 0xBF}, []byte{})
	// Previously the WriteFile error was silently discarded; a failed
	// rewrite would leave a stale file and corrupt the later import.
	if err := ioutil.WriteFile(pathlike, rep, 0755); err != nil {
		panic(err)
	}
}
|
|
|
|
|
|
|
|
|
|
func UpdateTimesheetReport(pathlike string) {
|
|
|
|
|
connector := &db.ConnectorGeneric{}
|
|
|
|
|
|
|
|
|
|
reports := loadReports(pathlike)
|
|
|
|
|
|
|
|
|
|
tableWipe := db.NewRunner("create-mercury-hrTimesheets-table.sql", db.MercuryDatabaseName)
|
|
|
|
|
tableWipe := db.NewRunner("create-mercury-hrPaycorHours-table.sql", db.MercuryDatabaseName)
|
|
|
|
|
connector.ExecuteSqlScript(tableWipe)
|
|
|
|
|
|
|
|
|
|
for _, hourReport := range *reports {
|
|
|
|
|
log.Printf("Updating database\n")
|
|
|
|
|
db.BlockUpdate[HourReportLineLegacy](connector, db.MercuryDatabaseName, "update-mercury-hrTimesheets.sql", hourReport.ReportLines)
|
|
|
|
|
db.BlockUpdate[HourReportLine](connector, db.MercuryDatabaseName, "update-mercury-hrPaycorHours.sql", hourReport.ReportLines)
|
|
|
|
|
log.Printf("Updates finished.\n")
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
@ -152,28 +179,37 @@ func UpdateEmployeeDirectory(pathlike string) {
|
|
|
|
|
func NewHourReport(pathlike string, skipFirstRow bool) *HourReport {
|
|
|
|
|
report := HourReport{
|
|
|
|
|
FilePath: pathlike,
|
|
|
|
|
Records: nil,
|
|
|
|
|
SkipFirstRow: skipFirstRow,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
asyncChan := make(chan *HourReportLoadTask)
|
|
|
|
|
//asyncChan := make(chan *HourReportLoadTask)
|
|
|
|
|
|
|
|
|
|
go loadTimeSheet(report.FilePath, asyncChan)
|
|
|
|
|
recordStatus := <-asyncChan
|
|
|
|
|
if recordStatus.Err != nil {
|
|
|
|
|
fmt.Printf("Error in the following file: %s\n", report.FilePath)
|
|
|
|
|
panic(recordStatus.Err)
|
|
|
|
|
return nil
|
|
|
|
|
//go loadTimeSheet(report.FilePath, asyncChan)
|
|
|
|
|
//recordStatus := <-asyncChan
|
|
|
|
|
//if recordStatus.Err != nil {
|
|
|
|
|
// fmt.Printf("Error in the following file: %s\n", report.FilePath)
|
|
|
|
|
// panic(recordStatus.Err)
|
|
|
|
|
// return nil
|
|
|
|
|
//}
|
|
|
|
|
//
|
|
|
|
|
//report.Records = recordStatus.Records
|
|
|
|
|
|
|
|
|
|
processed := *csvmagic.LoadCsvAsObjects[HourReportLine](pathlike, nil)
|
|
|
|
|
clean := make([]*HourReportLine, len(processed), len(processed))
|
|
|
|
|
weekOf := fileNameToSQLDate(pathlike)
|
|
|
|
|
for i := range processed {
|
|
|
|
|
v := processed[i]
|
|
|
|
|
v.WeekEnding = weekOf
|
|
|
|
|
clean[i] = &v
|
|
|
|
|
}
|
|
|
|
|
report.ReportLines = &clean
|
|
|
|
|
|
|
|
|
|
report.Records = recordStatus.Records
|
|
|
|
|
|
|
|
|
|
report.ReportLines = processReportToLines(report)
|
|
|
|
|
//report.ReportLines = processReportToLines(report)
|
|
|
|
|
|
|
|
|
|
return &report
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func processReportToLines(report HourReport) *[]*HourReportLineLegacy {
|
|
|
|
|
func processReportToLines(report HourReportLegacy) *[]*HourReportLineLegacy {
|
|
|
|
|
lines := make([]*HourReportLineLegacy, 0, 250)
|
|
|
|
|
localTable := *report.Records
|
|
|
|
|
headersRaw := (localTable)[0]
|
|
|
|
|
@ -440,7 +476,7 @@ func (line DirectoryReportLine) ToQueryBlock() string {
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
//deprecated
|
|
|
|
|
func rename(report HourReport) {
|
|
|
|
|
func rename(report HourReportLegacy) {
|
|
|
|
|
outPathBase := "/home/dtookey/work/clarity-reporting/pcorrect"
|
|
|
|
|
fileName := path.Base(report.FilePath)
|
|
|
|
|
if namePattern.MatchString(fileName) {
|
|
|
|
|
|